// Copyright 2015-2024 The Khronos Group Inc.
//
// SPDX-License-Identifier: Apache-2.0 OR MIT
//

// This header is generated from the Khronos Vulkan XML API Registry.

#ifndef VULKAN_STRUCTS_HPP
#define VULKAN_STRUCTS_HPP

#include <cstring>  // strcmp

namespace VULKAN_HPP_NAMESPACE
{

  //===============
  //=== STRUCTS ===
  //===============

  struct AcquireNextImageInfoKHR
  {
    using NativeType = VkAcquireNextImageInfoKHR;

    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAcquireNextImageInfoKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR AcquireNextImageInfoKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ = {},
                                                  uint64_t timeout_ = {},
                                                  VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {},
                                                  VULKAN_HPP_NAMESPACE::Fence fence_ = {},
                                                  uint32_t deviceMask_ = {},
                                                  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , swapchain( swapchain_ )
      , timeout( timeout_ )
      , semaphore( semaphore_ )
      , fence( fence_ )
      , deviceMask( deviceMask_ )
    {
    }

    VULKAN_HPP_CONSTEXPR AcquireNextImageInfoKHR( AcquireNextImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    AcquireNextImageInfoKHR( VkAcquireNextImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : AcquireNextImageInfoKHR( *reinterpret_cast<AcquireNextImageInfoKHR const *>( &rhs ) )
    {
    }

    AcquireNextImageInfoKHR & operator=( AcquireNextImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    AcquireNextImageInfoKHR & operator=( VkAcquireNextImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 AcquireNextImageInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AcquireNextImageInfoKHR & setSwapchain( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ ) VULKAN_HPP_NOEXCEPT
    {
      swapchain = swapchain_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AcquireNextImageInfoKHR & setTimeout( uint64_t timeout_ ) VULKAN_HPP_NOEXCEPT
    {
      timeout = timeout_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AcquireNextImageInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
    {
      semaphore = semaphore_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AcquireNextImageInfoKHR & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT
    {
      fence = fence_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AcquireNextImageInfoKHR & setDeviceMask( uint32_t deviceMask_ ) VULKAN_HPP_NOEXCEPT
    {
      deviceMask = deviceMask_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkAcquireNextImageInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkAcquireNextImageInfoKHR *>( this );
    }

    operator VkAcquireNextImageInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkAcquireNextImageInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
               const void * const &,
               VULKAN_HPP_NAMESPACE::SwapchainKHR const &,
               uint64_t const &,
               VULKAN_HPP_NAMESPACE::Semaphore const &,
               VULKAN_HPP_NAMESPACE::Fence const &,
               uint32_t const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, swapchain, timeout, semaphore, fence, deviceMask );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( AcquireNextImageInfoKHR const & ) const = default;
#else
    bool operator==( AcquireNextImageInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
# if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
# else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchain == rhs.swapchain ) && ( timeout == rhs.timeout ) &&
             ( semaphore == rhs.semaphore ) && ( fence == rhs.fence ) && ( deviceMask == rhs.deviceMask );
# endif
    }

    bool operator!=( AcquireNextImageInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAcquireNextImageInfoKHR;
    const void * pNext = {};
    VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain = {};
    uint64_t timeout = {};
    VULKAN_HPP_NAMESPACE::Semaphore semaphore = {};
    VULKAN_HPP_NAMESPACE::Fence fence = {};
    uint32_t deviceMask = {};
  };

  template <>
  struct CppType<StructureType, StructureType::eAcquireNextImageInfoKHR>
  {
    using Type = AcquireNextImageInfoKHR;
  };

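  // Illustrative usage sketch (not part of the generated header): the fluent setters above let an
  // application populate the struct in a single expression. The "swapchain" and "imageAvailable"
  // handles are hypothetical application-owned objects, and the default "vk" namespace is assumed:
  //
  //   auto acquireInfo = vk::AcquireNextImageInfoKHR{}
  //                        .setSwapchain( swapchain )
  //                        .setTimeout( UINT64_MAX )
  //                        .setSemaphore( imageAvailable )
  //                        .setDeviceMask( 1 );
  //   auto imageIndex = device.acquireNextImage2KHR( acquireInfo );
  //
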
  struct AcquireProfilingLockInfoKHR
  {
    using NativeType = VkAcquireProfilingLockInfoKHR;

    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAcquireProfilingLockInfoKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR AcquireProfilingLockInfoKHR( VULKAN_HPP_NAMESPACE::AcquireProfilingLockFlagsKHR flags_ = {},
                                                      uint64_t timeout_ = {},
                                                      const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , flags( flags_ )
      , timeout( timeout_ )
    {
    }

    VULKAN_HPP_CONSTEXPR AcquireProfilingLockInfoKHR( AcquireProfilingLockInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    AcquireProfilingLockInfoKHR( VkAcquireProfilingLockInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : AcquireProfilingLockInfoKHR( *reinterpret_cast<AcquireProfilingLockInfoKHR const *>( &rhs ) )
    {
    }

    AcquireProfilingLockInfoKHR & operator=( AcquireProfilingLockInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    AcquireProfilingLockInfoKHR & operator=( VkAcquireProfilingLockInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 AcquireProfilingLockInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AcquireProfilingLockInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::AcquireProfilingLockFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AcquireProfilingLockInfoKHR & setTimeout( uint64_t timeout_ ) VULKAN_HPP_NOEXCEPT
    {
      timeout = timeout_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkAcquireProfilingLockInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkAcquireProfilingLockInfoKHR *>( this );
    }

    operator VkAcquireProfilingLockInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkAcquireProfilingLockInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::AcquireProfilingLockFlagsKHR const &, uint64_t const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, timeout );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( AcquireProfilingLockInfoKHR const & ) const = default;
#else
    bool operator==( AcquireProfilingLockInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
# if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
# else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( timeout == rhs.timeout );
# endif
    }

    bool operator!=( AcquireProfilingLockInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAcquireProfilingLockInfoKHR;
    const void * pNext = {};
    VULKAN_HPP_NAMESPACE::AcquireProfilingLockFlagsKHR flags = {};
    uint64_t timeout = {};
  };

  template <>
  struct CppType<StructureType, StructureType::eAcquireProfilingLockInfoKHR>
  {
    using Type = AcquireProfilingLockInfoKHR;
  };

  struct AllocationCallbacks
  {
    using NativeType = VkAllocationCallbacks;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR AllocationCallbacks( void * pUserData_ = {},
                                              PFN_vkAllocationFunction pfnAllocation_ = {},
                                              PFN_vkReallocationFunction pfnReallocation_ = {},
                                              PFN_vkFreeFunction pfnFree_ = {},
                                              PFN_vkInternalAllocationNotification pfnInternalAllocation_ = {},
                                              PFN_vkInternalFreeNotification pfnInternalFree_ = {} ) VULKAN_HPP_NOEXCEPT
      : pUserData( pUserData_ )
      , pfnAllocation( pfnAllocation_ )
      , pfnReallocation( pfnReallocation_ )
      , pfnFree( pfnFree_ )
      , pfnInternalAllocation( pfnInternalAllocation_ )
      , pfnInternalFree( pfnInternalFree_ )
    {
    }

    VULKAN_HPP_CONSTEXPR AllocationCallbacks( AllocationCallbacks const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    AllocationCallbacks( VkAllocationCallbacks const & rhs ) VULKAN_HPP_NOEXCEPT : AllocationCallbacks( *reinterpret_cast<AllocationCallbacks const *>( &rhs ) )
    {
    }

    AllocationCallbacks & operator=( AllocationCallbacks const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    AllocationCallbacks & operator=( VkAllocationCallbacks const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AllocationCallbacks const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks & setPUserData( void * pUserData_ ) VULKAN_HPP_NOEXCEPT
    {
      pUserData = pUserData_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks & setPfnAllocation( PFN_vkAllocationFunction pfnAllocation_ ) VULKAN_HPP_NOEXCEPT
    {
      pfnAllocation = pfnAllocation_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks & setPfnReallocation( PFN_vkReallocationFunction pfnReallocation_ ) VULKAN_HPP_NOEXCEPT
    {
      pfnReallocation = pfnReallocation_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks & setPfnFree( PFN_vkFreeFunction pfnFree_ ) VULKAN_HPP_NOEXCEPT
    {
      pfnFree = pfnFree_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks & setPfnInternalAllocation( PFN_vkInternalAllocationNotification pfnInternalAllocation_ ) VULKAN_HPP_NOEXCEPT
    {
      pfnInternalAllocation = pfnInternalAllocation_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks & setPfnInternalFree( PFN_vkInternalFreeNotification pfnInternalFree_ ) VULKAN_HPP_NOEXCEPT
    {
      pfnInternalFree = pfnInternalFree_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkAllocationCallbacks const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkAllocationCallbacks *>( this );
    }

    operator VkAllocationCallbacks &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkAllocationCallbacks *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<void * const &,
               PFN_vkAllocationFunction const &,
               PFN_vkReallocationFunction const &,
               PFN_vkFreeFunction const &,
               PFN_vkInternalAllocationNotification const &,
               PFN_vkInternalFreeNotification const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( pUserData, pfnAllocation, pfnReallocation, pfnFree, pfnInternalAllocation, pfnInternalFree );
    }
#endif

    bool operator==( AllocationCallbacks const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( pUserData == rhs.pUserData ) && ( pfnAllocation == rhs.pfnAllocation ) && ( pfnReallocation == rhs.pfnReallocation ) &&
             ( pfnFree == rhs.pfnFree ) && ( pfnInternalAllocation == rhs.pfnInternalAllocation ) && ( pfnInternalFree == rhs.pfnInternalFree );
#endif
    }

    bool operator!=( AllocationCallbacks const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }

  public:
    void * pUserData = {};
    PFN_vkAllocationFunction pfnAllocation = {};
    PFN_vkReallocationFunction pfnReallocation = {};
    PFN_vkFreeFunction pfnFree = {};
    PFN_vkInternalAllocationNotification pfnInternalAllocation = {};
    PFN_vkInternalFreeNotification pfnInternalFree = {};
  };

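  // Illustrative usage sketch (not part of the generated header): AllocationCallbacks routes host
  // allocations through an application-provided allocator. "myAlloc", "myRealloc", "myFree" and
  // "myAllocatorState" are hypothetical; the callbacks must match the PFN_vkAllocationFunction,
  // PFN_vkReallocationFunction and PFN_vkFreeFunction signatures:
  //
  //   vk::AllocationCallbacks callbacks{};
  //   callbacks.setPUserData( &myAllocatorState )
  //            .setPfnAllocation( &myAlloc )
  //            .setPfnReallocation( &myRealloc )
  //            .setPfnFree( &myFree );
  //   vk::Instance instance = vk::createInstance( createInfo, callbacks );
  //
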
  struct ApplicationInfo
  {
    using NativeType = VkApplicationInfo;

    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eApplicationInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR ApplicationInfo( const char * pApplicationName_ = {},
                                          uint32_t applicationVersion_ = {},
                                          const char * pEngineName_ = {},
                                          uint32_t engineVersion_ = {},
                                          uint32_t apiVersion_ = {},
                                          const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , pApplicationName( pApplicationName_ )
      , applicationVersion( applicationVersion_ )
      , pEngineName( pEngineName_ )
      , engineVersion( engineVersion_ )
      , apiVersion( apiVersion_ )
    {
    }

    VULKAN_HPP_CONSTEXPR ApplicationInfo( ApplicationInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ApplicationInfo( VkApplicationInfo const & rhs ) VULKAN_HPP_NOEXCEPT : ApplicationInfo( *reinterpret_cast<ApplicationInfo const *>( &rhs ) ) {}

    ApplicationInfo & operator=( ApplicationInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    ApplicationInfo & operator=( VkApplicationInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ApplicationInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 ApplicationInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ApplicationInfo & setPApplicationName( const char * pApplicationName_ ) VULKAN_HPP_NOEXCEPT
    {
      pApplicationName = pApplicationName_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ApplicationInfo & setApplicationVersion( uint32_t applicationVersion_ ) VULKAN_HPP_NOEXCEPT
    {
      applicationVersion = applicationVersion_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ApplicationInfo & setPEngineName( const char * pEngineName_ ) VULKAN_HPP_NOEXCEPT
    {
      pEngineName = pEngineName_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ApplicationInfo & setEngineVersion( uint32_t engineVersion_ ) VULKAN_HPP_NOEXCEPT
    {
      engineVersion = engineVersion_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ApplicationInfo & setApiVersion( uint32_t apiVersion_ ) VULKAN_HPP_NOEXCEPT
    {
      apiVersion = apiVersion_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkApplicationInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkApplicationInfo *>( this );
    }

    operator VkApplicationInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkApplicationInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
               const void * const &,
               const char * const &,
               uint32_t const &,
               const char * const &,
               uint32_t const &,
               uint32_t const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, pApplicationName, applicationVersion, pEngineName, engineVersion, apiVersion );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    std::strong_ordering operator<=>( ApplicationInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
        return cmp;
      if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
        return cmp;
      if ( pApplicationName != rhs.pApplicationName )
        if ( auto cmp = strcmp( pApplicationName, rhs.pApplicationName ); cmp != 0 )
          return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
      if ( auto cmp = applicationVersion <=> rhs.applicationVersion; cmp != 0 )
        return cmp;
      if ( pEngineName != rhs.pEngineName )
        if ( auto cmp = strcmp( pEngineName, rhs.pEngineName ); cmp != 0 )
          return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
      if ( auto cmp = engineVersion <=> rhs.engineVersion; cmp != 0 )
        return cmp;
      if ( auto cmp = apiVersion <=> rhs.apiVersion; cmp != 0 )
        return cmp;

      return std::strong_ordering::equivalent;
    }
#endif

    bool operator==( ApplicationInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( ( pApplicationName == rhs.pApplicationName ) || ( strcmp( pApplicationName, rhs.pApplicationName ) == 0 ) ) &&
             ( applicationVersion == rhs.applicationVersion ) && ( ( pEngineName == rhs.pEngineName ) || ( strcmp( pEngineName, rhs.pEngineName ) == 0 ) ) &&
             ( engineVersion == rhs.engineVersion ) && ( apiVersion == rhs.apiVersion );
    }

    bool operator!=( ApplicationInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eApplicationInfo;
    const void * pNext = {};
    const char * pApplicationName = {};
    uint32_t applicationVersion = {};
    const char * pEngineName = {};
    uint32_t engineVersion = {};
    uint32_t apiVersion = {};
  };

  template <>
  struct CppType<StructureType, StructureType::eApplicationInfo>
  {
    using Type = ApplicationInfo;
  };

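  // Illustrative usage sketch (not part of the generated header): ApplicationInfo is typically
  // filled out once and referenced from an InstanceCreateInfo. The application and engine names
  // below are arbitrary examples, and the default "vk" namespace is assumed:
  //
  //   vk::ApplicationInfo appInfo( "MyApp", VK_MAKE_VERSION( 1, 0, 0 ), "MyEngine",
  //                                VK_MAKE_VERSION( 1, 0, 0 ), VK_API_VERSION_1_2 );
  //   vk::InstanceCreateInfo createInfo( {}, &appInfo );
  //   vk::Instance instance = vk::createInstance( createInfo );
  //
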
struct ApplicationParametersEXT
|
|
{
|
|
using NativeType = VkApplicationParametersEXT;
|
|
|
|
static const bool allowDuplicate = true;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eApplicationParametersEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ApplicationParametersEXT(
|
|
uint32_t vendorID_ = {}, uint32_t deviceID_ = {}, uint32_t key_ = {}, uint64_t value_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, vendorID( vendorID_ )
|
|
, deviceID( deviceID_ )
|
|
, key( key_ )
|
|
, value( value_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR ApplicationParametersEXT( ApplicationParametersEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ApplicationParametersEXT( VkApplicationParametersEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ApplicationParametersEXT( *reinterpret_cast<ApplicationParametersEXT const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
ApplicationParametersEXT & operator=( ApplicationParametersEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ApplicationParametersEXT & operator=( VkApplicationParametersEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ApplicationParametersEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ApplicationParametersEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ApplicationParametersEXT & setVendorID( uint32_t vendorID_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
vendorID = vendorID_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ApplicationParametersEXT & setDeviceID( uint32_t deviceID_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
deviceID = deviceID_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ApplicationParametersEXT & setKey( uint32_t key_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
key = key_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ApplicationParametersEXT & setValue( uint64_t value_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
value = value_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkApplicationParametersEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkApplicationParametersEXT *>( this );
|
|
}
|
|
|
|
operator VkApplicationParametersEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkApplicationParametersEXT *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, uint32_t const &, uint32_t const &, uint64_t const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, vendorID, deviceID, key, value );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( ApplicationParametersEXT const & ) const = default;
|
|
#else
|
|
bool operator==( ApplicationParametersEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( vendorID == rhs.vendorID ) && ( deviceID == rhs.deviceID ) && ( key == rhs.key ) &&
|
|
( value == rhs.value );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( ApplicationParametersEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eApplicationParametersEXT;
|
|
const void * pNext = {};
|
|
uint32_t vendorID = {};
|
|
uint32_t deviceID = {};
|
|
uint32_t key = {};
|
|
uint64_t value = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eApplicationParametersEXT>
|
|
{
|
|
using Type = ApplicationParametersEXT;
|
|
};
|
|
|
|
struct AttachmentDescription
|
|
{
|
|
using NativeType = VkAttachmentDescription;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR
|
|
AttachmentDescription( VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags_ = {},
|
|
VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
|
|
VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1,
|
|
VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad,
|
|
VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore,
|
|
VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad,
|
|
VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore,
|
|
VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
|
|
VULKAN_HPP_NAMESPACE::ImageLayout finalLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT
|
|
: flags( flags_ )
|
|
, format( format_ )
|
|
, samples( samples_ )
|
|
, loadOp( loadOp_ )
|
|
, storeOp( storeOp_ )
|
|
, stencilLoadOp( stencilLoadOp_ )
|
|
, stencilStoreOp( stencilStoreOp_ )
|
|
, initialLayout( initialLayout_ )
|
|
, finalLayout( finalLayout_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR AttachmentDescription( AttachmentDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
AttachmentDescription( VkAttachmentDescription const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: AttachmentDescription( *reinterpret_cast<AttachmentDescription const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
AttachmentDescription & operator=( AttachmentDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
AttachmentDescription & operator=( VkAttachmentDescription const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentDescription const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 AttachmentDescription & setFlags( VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 AttachmentDescription & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
format = format_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 AttachmentDescription & setSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
samples = samples_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 AttachmentDescription & setLoadOp( VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
loadOp = loadOp_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 AttachmentDescription & setStoreOp( VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
storeOp = storeOp_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 AttachmentDescription & setStencilLoadOp( VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
stencilLoadOp = stencilLoadOp_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 AttachmentDescription & setStencilStoreOp( VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
stencilStoreOp = stencilStoreOp_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 AttachmentDescription & setInitialLayout( VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
initialLayout = initialLayout_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 AttachmentDescription & setFinalLayout( VULKAN_HPP_NAMESPACE::ImageLayout finalLayout_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
finalLayout = finalLayout_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkAttachmentDescription const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkAttachmentDescription *>( this );
|
|
}
|
|
|
|
operator VkAttachmentDescription &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkAttachmentDescription *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags const &,
|
|
VULKAN_HPP_NAMESPACE::Format const &,
|
|
VULKAN_HPP_NAMESPACE::SampleCountFlagBits const &,
|
|
VULKAN_HPP_NAMESPACE::AttachmentLoadOp const &,
|
|
VULKAN_HPP_NAMESPACE::AttachmentStoreOp const &,
|
|
VULKAN_HPP_NAMESPACE::AttachmentLoadOp const &,
|
|
VULKAN_HPP_NAMESPACE::AttachmentStoreOp const &,
|
|
VULKAN_HPP_NAMESPACE::ImageLayout const &,
|
|
VULKAN_HPP_NAMESPACE::ImageLayout const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( flags, format, samples, loadOp, storeOp, stencilLoadOp, stencilStoreOp, initialLayout, finalLayout );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( AttachmentDescription const & ) const = default;
|
|
#else
|
|
bool operator==( AttachmentDescription const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( flags == rhs.flags ) && ( format == rhs.format ) && ( samples == rhs.samples ) && ( loadOp == rhs.loadOp ) && ( storeOp == rhs.storeOp ) &&
|
|
( stencilLoadOp == rhs.stencilLoadOp ) && ( stencilStoreOp == rhs.stencilStoreOp ) && ( initialLayout == rhs.initialLayout ) &&
|
|
( finalLayout == rhs.finalLayout );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( AttachmentDescription const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags = {};
|
|
VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
|
|
VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
|
|
VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad;
|
|
VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore;
|
|
VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad;
|
|
VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore;
|
|
VULKAN_HPP_NAMESPACE::ImageLayout initialLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
|
|
VULKAN_HPP_NAMESPACE::ImageLayout finalLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
|
|
};
|
|
|
|
struct AttachmentDescription2
|
|
{
|
|
using NativeType = VkAttachmentDescription2;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAttachmentDescription2;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR AttachmentDescription2( VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags_ = {},
|
|
VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
|
|
VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1,
|
|
VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad,
|
|
VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore,
|
|
VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad,
|
|
VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore,
|
|
VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
|
|
VULKAN_HPP_NAMESPACE::ImageLayout finalLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, flags( flags_ )
|
|
, format( format_ )
|
|
, samples( samples_ )
|
|
, loadOp( loadOp_ )
|
|
, storeOp( storeOp_ )
|
|
, stencilLoadOp( stencilLoadOp_ )
|
|
, stencilStoreOp( stencilStoreOp_ )
|
|
, initialLayout( initialLayout_ )
|
|
, finalLayout( finalLayout_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR AttachmentDescription2( AttachmentDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
AttachmentDescription2( VkAttachmentDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: AttachmentDescription2( *reinterpret_cast<AttachmentDescription2 const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
AttachmentDescription2 & operator=( AttachmentDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
AttachmentDescription2 & operator=( VkAttachmentDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentDescription2 const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & setFlags( VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
format = format_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & setSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
samples = samples_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & setLoadOp( VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
loadOp = loadOp_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & setStoreOp( VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
storeOp = storeOp_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & setStencilLoadOp( VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
stencilLoadOp = stencilLoadOp_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & setStencilStoreOp( VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
stencilStoreOp = stencilStoreOp_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & setInitialLayout( VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
initialLayout = initialLayout_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & setFinalLayout( VULKAN_HPP_NAMESPACE::ImageLayout finalLayout_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
finalLayout = finalLayout_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkAttachmentDescription2 const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkAttachmentDescription2 *>( this );
|
|
}
|
|
|
|
operator VkAttachmentDescription2 &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkAttachmentDescription2 *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
const void * const &,
|
|
VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags const &,
|
|
VULKAN_HPP_NAMESPACE::Format const &,
|
|
VULKAN_HPP_NAMESPACE::SampleCountFlagBits const &,
|
|
VULKAN_HPP_NAMESPACE::AttachmentLoadOp const &,
|
|
VULKAN_HPP_NAMESPACE::AttachmentStoreOp const &,
|
|
VULKAN_HPP_NAMESPACE::AttachmentLoadOp const &,
|
|
VULKAN_HPP_NAMESPACE::AttachmentStoreOp const &,
|
|
VULKAN_HPP_NAMESPACE::ImageLayout const &,
|
|
VULKAN_HPP_NAMESPACE::ImageLayout const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags, format, samples, loadOp, storeOp, stencilLoadOp, stencilStoreOp, initialLayout, finalLayout );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( AttachmentDescription2 const & ) const = default;
|
|
#else
|
|
bool operator==( AttachmentDescription2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( format == rhs.format ) && ( samples == rhs.samples ) &&
|
|
( loadOp == rhs.loadOp ) && ( storeOp == rhs.storeOp ) && ( stencilLoadOp == rhs.stencilLoadOp ) && ( stencilStoreOp == rhs.stencilStoreOp ) &&
|
|
( initialLayout == rhs.initialLayout ) && ( finalLayout == rhs.finalLayout );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( AttachmentDescription2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAttachmentDescription2;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags = {};
|
|
VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
|
|
VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
|
|
VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad;
|
|
VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore;
|
|
VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad;
|
|
VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore;
|
|
VULKAN_HPP_NAMESPACE::ImageLayout initialLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
|
|
VULKAN_HPP_NAMESPACE::ImageLayout finalLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eAttachmentDescription2>
|
|
{
|
|
using Type = AttachmentDescription2;
|
|
};
|
|
|
|
using AttachmentDescription2KHR = AttachmentDescription2;
|
|
|
|
struct AttachmentDescriptionStencilLayout
|
|
{
|
|
using NativeType = VkAttachmentDescriptionStencilLayout;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAttachmentDescriptionStencilLayout;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR
|
|
AttachmentDescriptionStencilLayout( VULKAN_HPP_NAMESPACE::ImageLayout stencilInitialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
|
|
VULKAN_HPP_NAMESPACE::ImageLayout stencilFinalLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
|
|
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, stencilInitialLayout( stencilInitialLayout_ )
|
|
, stencilFinalLayout( stencilFinalLayout_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR AttachmentDescriptionStencilLayout( AttachmentDescriptionStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
AttachmentDescriptionStencilLayout( VkAttachmentDescriptionStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: AttachmentDescriptionStencilLayout( *reinterpret_cast<AttachmentDescriptionStencilLayout const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
AttachmentDescriptionStencilLayout & operator=( AttachmentDescriptionStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
AttachmentDescriptionStencilLayout & operator=( VkAttachmentDescriptionStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentDescriptionStencilLayout const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 AttachmentDescriptionStencilLayout & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 AttachmentDescriptionStencilLayout &
|
|
setStencilInitialLayout( VULKAN_HPP_NAMESPACE::ImageLayout stencilInitialLayout_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
stencilInitialLayout = stencilInitialLayout_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 AttachmentDescriptionStencilLayout &
|
|
setStencilFinalLayout( VULKAN_HPP_NAMESPACE::ImageLayout stencilFinalLayout_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
stencilFinalLayout = stencilFinalLayout_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkAttachmentDescriptionStencilLayout const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkAttachmentDescriptionStencilLayout *>( this );
|
|
}
|
|
|
|
operator VkAttachmentDescriptionStencilLayout &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkAttachmentDescriptionStencilLayout *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::
|
|
tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ImageLayout const &, VULKAN_HPP_NAMESPACE::ImageLayout const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, stencilInitialLayout, stencilFinalLayout );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( AttachmentDescriptionStencilLayout const & ) const = default;
|
|
#else
|
|
bool operator==( AttachmentDescriptionStencilLayout const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stencilInitialLayout == rhs.stencilInitialLayout ) &&
|
|
( stencilFinalLayout == rhs.stencilFinalLayout );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( AttachmentDescriptionStencilLayout const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAttachmentDescriptionStencilLayout;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::ImageLayout stencilInitialLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
|
|
VULKAN_HPP_NAMESPACE::ImageLayout stencilFinalLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eAttachmentDescriptionStencilLayout>
|
|
{
|
|
using Type = AttachmentDescriptionStencilLayout;
|
|
};
|
|
|
|
using AttachmentDescriptionStencilLayoutKHR = AttachmentDescriptionStencilLayout;
|
|
|
|
struct AttachmentReference
|
|
{
|
|
using NativeType = VkAttachmentReference;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR AttachmentReference( uint32_t attachment_ = {},
|
|
VULKAN_HPP_NAMESPACE::ImageLayout layout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT
|
|
: attachment( attachment_ )
|
|
, layout( layout_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR AttachmentReference( AttachmentReference const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
AttachmentReference( VkAttachmentReference const & rhs ) VULKAN_HPP_NOEXCEPT : AttachmentReference( *reinterpret_cast<AttachmentReference const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
AttachmentReference & operator=( AttachmentReference const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
AttachmentReference & operator=( VkAttachmentReference const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentReference const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 AttachmentReference & setAttachment( uint32_t attachment_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
attachment = attachment_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 AttachmentReference & setLayout( VULKAN_HPP_NAMESPACE::ImageLayout layout_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
layout = layout_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkAttachmentReference const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkAttachmentReference *>( this );
|
|
}
|
|
|
|
operator VkAttachmentReference &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkAttachmentReference *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<uint32_t const &, VULKAN_HPP_NAMESPACE::ImageLayout const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( attachment, layout );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( AttachmentReference const & ) const = default;
|
|
#else
|
|
bool operator==( AttachmentReference const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( attachment == rhs.attachment ) && ( layout == rhs.layout );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( AttachmentReference const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
uint32_t attachment = {};
|
|
VULKAN_HPP_NAMESPACE::ImageLayout layout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
|
|
};
|
|
|
|
struct AttachmentReference2
|
|
{
|
|
using NativeType = VkAttachmentReference2;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAttachmentReference2;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR AttachmentReference2( uint32_t attachment_ = {},
|
|
VULKAN_HPP_NAMESPACE::ImageLayout layout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
|
|
VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, attachment( attachment_ )
|
|
, layout( layout_ )
|
|
, aspectMask( aspectMask_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR AttachmentReference2( AttachmentReference2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
AttachmentReference2( VkAttachmentReference2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: AttachmentReference2( *reinterpret_cast<AttachmentReference2 const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
AttachmentReference2 & operator=( AttachmentReference2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
AttachmentReference2 & operator=( VkAttachmentReference2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentReference2 const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 AttachmentReference2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 AttachmentReference2 & setAttachment( uint32_t attachment_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
attachment = attachment_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 AttachmentReference2 & setLayout( VULKAN_HPP_NAMESPACE::ImageLayout layout_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
layout = layout_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 AttachmentReference2 & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
aspectMask = aspectMask_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkAttachmentReference2 const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkAttachmentReference2 *>( this );
|
|
}
|
|
|
|
operator VkAttachmentReference2 &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkAttachmentReference2 *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
const void * const &,
|
|
uint32_t const &,
|
|
VULKAN_HPP_NAMESPACE::ImageLayout const &,
|
|
VULKAN_HPP_NAMESPACE::ImageAspectFlags const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, attachment, layout, aspectMask );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( AttachmentReference2 const & ) const = default;
|
|
#else
|
|
bool operator==( AttachmentReference2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( attachment == rhs.attachment ) && ( layout == rhs.layout ) &&
|
|
( aspectMask == rhs.aspectMask );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( AttachmentReference2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAttachmentReference2;
|
|
const void * pNext = {};
|
|
uint32_t attachment = {};
|
|
VULKAN_HPP_NAMESPACE::ImageLayout layout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
|
|
VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eAttachmentReference2>
|
|
{
|
|
using Type = AttachmentReference2;
|
|
};
|
|
|
|
using AttachmentReference2KHR = AttachmentReference2;
|
|
|
|
struct AttachmentReferenceStencilLayout
|
|
{
|
|
using NativeType = VkAttachmentReferenceStencilLayout;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAttachmentReferenceStencilLayout;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR AttachmentReferenceStencilLayout( VULKAN_HPP_NAMESPACE::ImageLayout stencilLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
|
|
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, stencilLayout( stencilLayout_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR AttachmentReferenceStencilLayout( AttachmentReferenceStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
AttachmentReferenceStencilLayout( VkAttachmentReferenceStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: AttachmentReferenceStencilLayout( *reinterpret_cast<AttachmentReferenceStencilLayout const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
AttachmentReferenceStencilLayout & operator=( AttachmentReferenceStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
AttachmentReferenceStencilLayout & operator=( VkAttachmentReferenceStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentReferenceStencilLayout const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 AttachmentReferenceStencilLayout & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 AttachmentReferenceStencilLayout & setStencilLayout( VULKAN_HPP_NAMESPACE::ImageLayout stencilLayout_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
stencilLayout = stencilLayout_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkAttachmentReferenceStencilLayout const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkAttachmentReferenceStencilLayout *>( this );
|
|
}
|
|
|
|
operator VkAttachmentReferenceStencilLayout &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkAttachmentReferenceStencilLayout *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ImageLayout const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, stencilLayout );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( AttachmentReferenceStencilLayout const & ) const = default;
|
|
#else
|
|
bool operator==( AttachmentReferenceStencilLayout const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stencilLayout == rhs.stencilLayout );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( AttachmentReferenceStencilLayout const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAttachmentReferenceStencilLayout;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::ImageLayout stencilLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eAttachmentReferenceStencilLayout>
|
|
{
|
|
using Type = AttachmentReferenceStencilLayout;
|
|
};
|
|
|
|
using AttachmentReferenceStencilLayoutKHR = AttachmentReferenceStencilLayout;
|
|
|
|
struct Extent2D
|
|
{
|
|
using NativeType = VkExtent2D;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR Extent2D( uint32_t width_ = {}, uint32_t height_ = {} ) VULKAN_HPP_NOEXCEPT
|
|
: width( width_ )
|
|
, height( height_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR Extent2D( Extent2D const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
Extent2D( VkExtent2D const & rhs ) VULKAN_HPP_NOEXCEPT : Extent2D( *reinterpret_cast<Extent2D const *>( &rhs ) ) {}
|
|
|
|
Extent2D & operator=( Extent2D const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
Extent2D & operator=( VkExtent2D const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Extent2D const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 Extent2D & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT
    {
      width = width_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 Extent2D & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT
    {
      height = height_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkExtent2D const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkExtent2D *>( this );
    }

    operator VkExtent2D &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkExtent2D *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<uint32_t const &, uint32_t const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( width, height );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( Extent2D const & ) const = default;
#else
    bool operator==( Extent2D const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
# if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
# else
      return ( width == rhs.width ) && ( height == rhs.height );
# endif
    }

    bool operator!=( Extent2D const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    uint32_t width = {};
    uint32_t height = {};
  };

  struct SampleLocationEXT
  {
    using NativeType = VkSampleLocationEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR SampleLocationEXT( float x_ = {}, float y_ = {} ) VULKAN_HPP_NOEXCEPT
      : x( x_ )
      , y( y_ )
    {
    }

    VULKAN_HPP_CONSTEXPR SampleLocationEXT( SampleLocationEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SampleLocationEXT( VkSampleLocationEXT const & rhs ) VULKAN_HPP_NOEXCEPT : SampleLocationEXT( *reinterpret_cast<SampleLocationEXT const *>( &rhs ) ) {}

    SampleLocationEXT & operator=( SampleLocationEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    SampleLocationEXT & operator=( VkSampleLocationEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SampleLocationEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 SampleLocationEXT & setX( float x_ ) VULKAN_HPP_NOEXCEPT
    {
      x = x_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SampleLocationEXT & setY( float y_ ) VULKAN_HPP_NOEXCEPT
    {
      y = y_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkSampleLocationEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSampleLocationEXT *>( this );
    }

    operator VkSampleLocationEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSampleLocationEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<float const &, float const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( x, y );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SampleLocationEXT const & ) const = default;
#else
    bool operator==( SampleLocationEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
# if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
# else
      return ( x == rhs.x ) && ( y == rhs.y );
# endif
    }

    bool operator!=( SampleLocationEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    float x = {};
    float y = {};
  };

  struct SampleLocationsInfoEXT
  {
    using NativeType = VkSampleLocationsInfoEXT;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSampleLocationsInfoEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      SampleLocationsInfoEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits sampleLocationsPerPixel_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1,
                              VULKAN_HPP_NAMESPACE::Extent2D sampleLocationGridSize_ = {},
                              uint32_t sampleLocationsCount_ = {},
                              const VULKAN_HPP_NAMESPACE::SampleLocationEXT * pSampleLocations_ = {},
                              const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , sampleLocationsPerPixel( sampleLocationsPerPixel_ )
      , sampleLocationGridSize( sampleLocationGridSize_ )
      , sampleLocationsCount( sampleLocationsCount_ )
      , pSampleLocations( pSampleLocations_ )
    {
    }

    VULKAN_HPP_CONSTEXPR SampleLocationsInfoEXT( SampleLocationsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SampleLocationsInfoEXT( VkSampleLocationsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : SampleLocationsInfoEXT( *reinterpret_cast<SampleLocationsInfoEXT const *>( &rhs ) )
    {
    }

# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    SampleLocationsInfoEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits sampleLocationsPerPixel_,
                            VULKAN_HPP_NAMESPACE::Extent2D sampleLocationGridSize_,
                            VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SampleLocationEXT> const & sampleLocations_,
                            const void * pNext_ = nullptr )
      : pNext( pNext_ )
      , sampleLocationsPerPixel( sampleLocationsPerPixel_ )
      , sampleLocationGridSize( sampleLocationGridSize_ )
      , sampleLocationsCount( static_cast<uint32_t>( sampleLocations_.size() ) )
      , pSampleLocations( sampleLocations_.data() )
    {
    }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    SampleLocationsInfoEXT & operator=( SampleLocationsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    SampleLocationsInfoEXT & operator=( VkSampleLocationsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 SampleLocationsInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SampleLocationsInfoEXT &
      setSampleLocationsPerPixel( VULKAN_HPP_NAMESPACE::SampleCountFlagBits sampleLocationsPerPixel_ ) VULKAN_HPP_NOEXCEPT
    {
      sampleLocationsPerPixel = sampleLocationsPerPixel_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SampleLocationsInfoEXT &
      setSampleLocationGridSize( VULKAN_HPP_NAMESPACE::Extent2D const & sampleLocationGridSize_ ) VULKAN_HPP_NOEXCEPT
    {
      sampleLocationGridSize = sampleLocationGridSize_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SampleLocationsInfoEXT & setSampleLocationsCount( uint32_t sampleLocationsCount_ ) VULKAN_HPP_NOEXCEPT
    {
      sampleLocationsCount = sampleLocationsCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SampleLocationsInfoEXT &
      setPSampleLocations( const VULKAN_HPP_NAMESPACE::SampleLocationEXT * pSampleLocations_ ) VULKAN_HPP_NOEXCEPT
    {
      pSampleLocations = pSampleLocations_;
      return *this;
    }

# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    SampleLocationsInfoEXT & setSampleLocations(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SampleLocationEXT> const & sampleLocations_ ) VULKAN_HPP_NOEXCEPT
    {
      sampleLocationsCount = static_cast<uint32_t>( sampleLocations_.size() );
      pSampleLocations = sampleLocations_.data();
      return *this;
    }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkSampleLocationsInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSampleLocationsInfoEXT *>( this );
    }

    operator VkSampleLocationsInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSampleLocationsInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
               const void * const &,
               VULKAN_HPP_NAMESPACE::SampleCountFlagBits const &,
               VULKAN_HPP_NAMESPACE::Extent2D const &,
               uint32_t const &,
               const VULKAN_HPP_NAMESPACE::SampleLocationEXT * const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, sampleLocationsPerPixel, sampleLocationGridSize, sampleLocationsCount, pSampleLocations );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SampleLocationsInfoEXT const & ) const = default;
#else
    bool operator==( SampleLocationsInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
# if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
# else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( sampleLocationsPerPixel == rhs.sampleLocationsPerPixel ) &&
             ( sampleLocationGridSize == rhs.sampleLocationGridSize ) && ( sampleLocationsCount == rhs.sampleLocationsCount ) &&
             ( pSampleLocations == rhs.pSampleLocations );
# endif
    }

    bool operator!=( SampleLocationsInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSampleLocationsInfoEXT;
    const void * pNext = {};
    VULKAN_HPP_NAMESPACE::SampleCountFlagBits sampleLocationsPerPixel = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
    VULKAN_HPP_NAMESPACE::Extent2D sampleLocationGridSize = {};
    uint32_t sampleLocationsCount = {};
    const VULKAN_HPP_NAMESPACE::SampleLocationEXT * pSampleLocations = {};
  };

  template <>
  struct CppType<StructureType, StructureType::eSampleLocationsInfoEXT>
  {
    using Type = SampleLocationsInfoEXT;
  };
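  // Illustrative usage sketch -- not part of the generated header. It shows how the
  // enhanced-mode ArrayProxy constructor of SampleLocationsInfoEXT derives both the
  // element count and the pointer from a single container. It assumes the default `vk`
  // namespace alias for VULKAN_HPP_NAMESPACE and that VULKAN_HPP_DISABLE_ENHANCED_MODE
  // is not defined; all variable names and values are placeholders.
  //
  //   std::array<vk::SampleLocationEXT, 4> locations = { vk::SampleLocationEXT( 0.25f, 0.25f ),
  //                                                      vk::SampleLocationEXT( 0.75f, 0.25f ),
  //                                                      vk::SampleLocationEXT( 0.25f, 0.75f ),
  //                                                      vk::SampleLocationEXT( 0.75f, 0.75f ) };
  //   vk::SampleLocationsInfoEXT sampleLocationsInfo( vk::SampleCountFlagBits::e4, vk::Extent2D( 1, 1 ), locations );
  //   // sampleLocationsInfo.sampleLocationsCount is 4; pSampleLocations points into `locations`.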
|
|
|
|
struct AttachmentSampleLocationsEXT
|
|
{
|
|
using NativeType = VkAttachmentSampleLocationsEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR AttachmentSampleLocationsEXT( uint32_t attachmentIndex_ = {},
|
|
VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo_ = {} ) VULKAN_HPP_NOEXCEPT
|
|
: attachmentIndex( attachmentIndex_ )
|
|
, sampleLocationsInfo( sampleLocationsInfo_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR AttachmentSampleLocationsEXT( AttachmentSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
AttachmentSampleLocationsEXT( VkAttachmentSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: AttachmentSampleLocationsEXT( *reinterpret_cast<AttachmentSampleLocationsEXT const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
AttachmentSampleLocationsEXT & operator=( AttachmentSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
AttachmentSampleLocationsEXT & operator=( VkAttachmentSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 AttachmentSampleLocationsEXT & setAttachmentIndex( uint32_t attachmentIndex_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
attachmentIndex = attachmentIndex_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 AttachmentSampleLocationsEXT &
|
|
setSampleLocationsInfo( VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const & sampleLocationsInfo_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
sampleLocationsInfo = sampleLocationsInfo_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkAttachmentSampleLocationsEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkAttachmentSampleLocationsEXT *>( this );
|
|
}
|
|
|
|
operator VkAttachmentSampleLocationsEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkAttachmentSampleLocationsEXT *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<uint32_t const &, VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( attachmentIndex, sampleLocationsInfo );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( AttachmentSampleLocationsEXT const & ) const = default;
|
|
#else
|
|
bool operator==( AttachmentSampleLocationsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( attachmentIndex == rhs.attachmentIndex ) && ( sampleLocationsInfo == rhs.sampleLocationsInfo );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( AttachmentSampleLocationsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
uint32_t attachmentIndex = {};
|
|
VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo = {};
|
|
};
|
|
|
|
struct BaseInStructure
|
|
{
|
|
using NativeType = VkBaseInStructure;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
BaseInStructure( VULKAN_HPP_NAMESPACE::StructureType sType_ = VULKAN_HPP_NAMESPACE::StructureType::eApplicationInfo,
|
|
const struct VULKAN_HPP_NAMESPACE::BaseInStructure * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: sType( sType_ )
|
|
, pNext( pNext_ )
|
|
{
|
|
}
|
|
|
|
BaseInStructure( BaseInStructure const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
BaseInStructure( VkBaseInStructure const & rhs ) VULKAN_HPP_NOEXCEPT : BaseInStructure( *reinterpret_cast<BaseInStructure const *>( &rhs ) ) {}
|
|
|
|
BaseInStructure & operator=( BaseInStructure const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
BaseInStructure & operator=( VkBaseInStructure const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BaseInStructure const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 BaseInStructure & setPNext( const struct VULKAN_HPP_NAMESPACE::BaseInStructure * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkBaseInStructure const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkBaseInStructure *>( this );
|
|
}
|
|
|
|
operator VkBaseInStructure &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkBaseInStructure *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const struct VULKAN_HPP_NAMESPACE::BaseInStructure * const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( BaseInStructure const & ) const = default;
|
|
#else
|
|
bool operator==( BaseInStructure const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( BaseInStructure const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = VULKAN_HPP_NAMESPACE::StructureType::eApplicationInfo;
|
|
const struct VULKAN_HPP_NAMESPACE::BaseInStructure * pNext = {};
|
|
};
|
|
|
|
struct BaseOutStructure
|
|
{
|
|
using NativeType = VkBaseOutStructure;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
BaseOutStructure( VULKAN_HPP_NAMESPACE::StructureType sType_ = VULKAN_HPP_NAMESPACE::StructureType::eApplicationInfo,
|
|
struct VULKAN_HPP_NAMESPACE::BaseOutStructure * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: sType( sType_ )
|
|
, pNext( pNext_ )
|
|
{
|
|
}
|
|
|
|
BaseOutStructure( BaseOutStructure const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
BaseOutStructure( VkBaseOutStructure const & rhs ) VULKAN_HPP_NOEXCEPT : BaseOutStructure( *reinterpret_cast<BaseOutStructure const *>( &rhs ) ) {}
|
|
|
|
BaseOutStructure & operator=( BaseOutStructure const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
BaseOutStructure & operator=( VkBaseOutStructure const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BaseOutStructure const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 BaseOutStructure & setPNext( struct VULKAN_HPP_NAMESPACE::BaseOutStructure * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkBaseOutStructure const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkBaseOutStructure *>( this );
|
|
}
|
|
|
|
operator VkBaseOutStructure &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkBaseOutStructure *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, struct VULKAN_HPP_NAMESPACE::BaseOutStructure * const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( BaseOutStructure const & ) const = default;
|
|
#else
|
|
bool operator==( BaseOutStructure const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( BaseOutStructure const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = VULKAN_HPP_NAMESPACE::StructureType::eApplicationInfo;
|
|
struct VULKAN_HPP_NAMESPACE::BaseOutStructure * pNext = {};
|
|
};
|
|
|
|
struct BindBufferMemoryDeviceGroupInfo
|
|
{
|
|
using NativeType = VkBindBufferMemoryDeviceGroupInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindBufferMemoryDeviceGroupInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR BindBufferMemoryDeviceGroupInfo( uint32_t deviceIndexCount_ = {},
|
|
const uint32_t * pDeviceIndices_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, deviceIndexCount( deviceIndexCount_ )
|
|
, pDeviceIndices( pDeviceIndices_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR BindBufferMemoryDeviceGroupInfo( BindBufferMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
BindBufferMemoryDeviceGroupInfo( VkBindBufferMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: BindBufferMemoryDeviceGroupInfo( *reinterpret_cast<BindBufferMemoryDeviceGroupInfo const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
BindBufferMemoryDeviceGroupInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & deviceIndices_, const void * pNext_ = nullptr )
|
|
: pNext( pNext_ ), deviceIndexCount( static_cast<uint32_t>( deviceIndices_.size() ) ), pDeviceIndices( deviceIndices_.data() )
|
|
{
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
BindBufferMemoryDeviceGroupInfo & operator=( BindBufferMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
BindBufferMemoryDeviceGroupInfo & operator=( VkBindBufferMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindBufferMemoryDeviceGroupInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 BindBufferMemoryDeviceGroupInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BindBufferMemoryDeviceGroupInfo & setDeviceIndexCount( uint32_t deviceIndexCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
deviceIndexCount = deviceIndexCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BindBufferMemoryDeviceGroupInfo & setPDeviceIndices( const uint32_t * pDeviceIndices_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pDeviceIndices = pDeviceIndices_;
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
BindBufferMemoryDeviceGroupInfo &
|
|
setDeviceIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & deviceIndices_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
deviceIndexCount = static_cast<uint32_t>( deviceIndices_.size() );
|
|
pDeviceIndices = deviceIndices_.data();
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkBindBufferMemoryDeviceGroupInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkBindBufferMemoryDeviceGroupInfo *>( this );
|
|
}
|
|
|
|
operator VkBindBufferMemoryDeviceGroupInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkBindBufferMemoryDeviceGroupInfo *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const uint32_t * const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, deviceIndexCount, pDeviceIndices );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( BindBufferMemoryDeviceGroupInfo const & ) const = default;
|
|
#else
|
|
bool operator==( BindBufferMemoryDeviceGroupInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceIndexCount == rhs.deviceIndexCount ) && ( pDeviceIndices == rhs.pDeviceIndices );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( BindBufferMemoryDeviceGroupInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindBufferMemoryDeviceGroupInfo;
|
|
const void * pNext = {};
|
|
uint32_t deviceIndexCount = {};
|
|
const uint32_t * pDeviceIndices = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eBindBufferMemoryDeviceGroupInfo>
|
|
{
|
|
using Type = BindBufferMemoryDeviceGroupInfo;
|
|
};
|
|
|
|
using BindBufferMemoryDeviceGroupInfoKHR = BindBufferMemoryDeviceGroupInfo;
|
|
|
|
  struct BindBufferMemoryInfo
  {
    using NativeType = VkBindBufferMemoryInfo;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindBufferMemoryInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR BindBufferMemoryInfo( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {},
                                               VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {},
                                               VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {},
                                               const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , buffer( buffer_ )
      , memory( memory_ )
      , memoryOffset( memoryOffset_ )
    {
    }

    VULKAN_HPP_CONSTEXPR BindBufferMemoryInfo( BindBufferMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    BindBufferMemoryInfo( VkBindBufferMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : BindBufferMemoryInfo( *reinterpret_cast<BindBufferMemoryInfo const *>( &rhs ) )
    {
    }

    BindBufferMemoryInfo & operator=( BindBufferMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    BindBufferMemoryInfo & operator=( VkBindBufferMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 BindBufferMemoryInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BindBufferMemoryInfo & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
    {
      buffer = buffer_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BindBufferMemoryInfo & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
    {
      memory = memory_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BindBufferMemoryInfo & setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      memoryOffset = memoryOffset_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkBindBufferMemoryInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkBindBufferMemoryInfo *>( this );
    }

    operator VkBindBufferMemoryInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkBindBufferMemoryInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
               const void * const &,
               VULKAN_HPP_NAMESPACE::Buffer const &,
               VULKAN_HPP_NAMESPACE::DeviceMemory const &,
               VULKAN_HPP_NAMESPACE::DeviceSize const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, buffer, memory, memoryOffset );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( BindBufferMemoryInfo const & ) const = default;
#else
    bool operator==( BindBufferMemoryInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
# if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
# else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( buffer == rhs.buffer ) && ( memory == rhs.memory ) && ( memoryOffset == rhs.memoryOffset );
# endif
    }

    bool operator!=( BindBufferMemoryInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindBufferMemoryInfo;
    const void * pNext = {};
    VULKAN_HPP_NAMESPACE::Buffer buffer = {};
    VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
    VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {};
  };

  template <>
  struct CppType<StructureType, StructureType::eBindBufferMemoryInfo>
  {
    using Type = BindBufferMemoryInfo;
  };

  using BindBufferMemoryInfoKHR = BindBufferMemoryInfo;
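  // Illustrative usage sketch -- not part of the generated header. BindBufferMemoryInfo
  // carries the same information as the parameters of vkBindBufferMemory and is consumed
  // by vkBindBufferMemory2 / vk::Device::bindBufferMemory2. The snippet assumes the
  // default `vk` namespace alias, enhanced mode, and pre-existing valid `device`,
  // `buffer` and `memory` handles; names and the offset value are placeholders.
  //
  //   vk::BindBufferMemoryInfo bindInfo( buffer, memory, 0 /* memoryOffset */ );
  //   device.bindBufferMemory2( bindInfo );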
  struct Offset2D
  {
    using NativeType = VkOffset2D;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR Offset2D( int32_t x_ = {}, int32_t y_ = {} ) VULKAN_HPP_NOEXCEPT
      : x( x_ )
      , y( y_ )
    {
    }

    VULKAN_HPP_CONSTEXPR Offset2D( Offset2D const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    Offset2D( VkOffset2D const & rhs ) VULKAN_HPP_NOEXCEPT : Offset2D( *reinterpret_cast<Offset2D const *>( &rhs ) ) {}

    Offset2D & operator=( Offset2D const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    Offset2D & operator=( VkOffset2D const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Offset2D const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 Offset2D & setX( int32_t x_ ) VULKAN_HPP_NOEXCEPT
    {
      x = x_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 Offset2D & setY( int32_t y_ ) VULKAN_HPP_NOEXCEPT
    {
      y = y_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkOffset2D const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkOffset2D *>( this );
    }

    operator VkOffset2D &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkOffset2D *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<int32_t const &, int32_t const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( x, y );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( Offset2D const & ) const = default;
#else
    bool operator==( Offset2D const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
# if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
# else
      return ( x == rhs.x ) && ( y == rhs.y );
# endif
    }

    bool operator!=( Offset2D const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    int32_t x = {};
    int32_t y = {};
  };

  struct Rect2D
  {
    using NativeType = VkRect2D;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR Rect2D( VULKAN_HPP_NAMESPACE::Offset2D offset_ = {}, VULKAN_HPP_NAMESPACE::Extent2D extent_ = {} ) VULKAN_HPP_NOEXCEPT
      : offset( offset_ )
      , extent( extent_ )
    {
    }

    VULKAN_HPP_CONSTEXPR Rect2D( Rect2D const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    Rect2D( VkRect2D const & rhs ) VULKAN_HPP_NOEXCEPT : Rect2D( *reinterpret_cast<Rect2D const *>( &rhs ) ) {}

    Rect2D & operator=( Rect2D const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    Rect2D & operator=( VkRect2D const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Rect2D const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 Rect2D & setOffset( VULKAN_HPP_NAMESPACE::Offset2D const & offset_ ) VULKAN_HPP_NOEXCEPT
    {
      offset = offset_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 Rect2D & setExtent( VULKAN_HPP_NAMESPACE::Extent2D const & extent_ ) VULKAN_HPP_NOEXCEPT
    {
      extent = extent_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkRect2D const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkRect2D *>( this );
    }

    operator VkRect2D &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkRect2D *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::Offset2D const &, VULKAN_HPP_NAMESPACE::Extent2D const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( offset, extent );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( Rect2D const & ) const = default;
#else
    bool operator==( Rect2D const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
# if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
# else
      return ( offset == rhs.offset ) && ( extent == rhs.extent );
# endif
    }

    bool operator!=( Rect2D const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::Offset2D offset = {};
    VULKAN_HPP_NAMESPACE::Extent2D extent = {};
  };
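  // Illustrative usage sketch -- not part of the generated header. Rect2D combines an
  // Offset2D origin with an Extent2D size; scissor rectangles and render areas are
  // expressed in it. The snippet assumes the default `vk` namespace alias; the
  // dimensions are placeholders.
  //
  //   vk::Rect2D scissor( vk::Offset2D( 0, 0 ), vk::Extent2D( 1920, 1080 ) );
  //   // or, equivalently, via the fluent setters:
  //   vk::Rect2D renderArea = vk::Rect2D().setOffset( vk::Offset2D( 0, 0 ) ).setExtent( vk::Extent2D( 1920, 1080 ) );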
|
|
|
|
struct BindImageMemoryDeviceGroupInfo
|
|
{
|
|
using NativeType = VkBindImageMemoryDeviceGroupInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindImageMemoryDeviceGroupInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR BindImageMemoryDeviceGroupInfo( uint32_t deviceIndexCount_ = {},
|
|
const uint32_t * pDeviceIndices_ = {},
|
|
uint32_t splitInstanceBindRegionCount_ = {},
|
|
const VULKAN_HPP_NAMESPACE::Rect2D * pSplitInstanceBindRegions_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, deviceIndexCount( deviceIndexCount_ )
|
|
, pDeviceIndices( pDeviceIndices_ )
|
|
, splitInstanceBindRegionCount( splitInstanceBindRegionCount_ )
|
|
, pSplitInstanceBindRegions( pSplitInstanceBindRegions_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR BindImageMemoryDeviceGroupInfo( BindImageMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
BindImageMemoryDeviceGroupInfo( VkBindImageMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: BindImageMemoryDeviceGroupInfo( *reinterpret_cast<BindImageMemoryDeviceGroupInfo const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
BindImageMemoryDeviceGroupInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & deviceIndices_,
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & splitInstanceBindRegions_ = {},
|
|
const void * pNext_ = nullptr )
|
|
: pNext( pNext_ )
|
|
, deviceIndexCount( static_cast<uint32_t>( deviceIndices_.size() ) )
|
|
, pDeviceIndices( deviceIndices_.data() )
|
|
, splitInstanceBindRegionCount( static_cast<uint32_t>( splitInstanceBindRegions_.size() ) )
|
|
, pSplitInstanceBindRegions( splitInstanceBindRegions_.data() )
|
|
{
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
BindImageMemoryDeviceGroupInfo & operator=( BindImageMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
BindImageMemoryDeviceGroupInfo & operator=( VkBindImageMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindImageMemoryDeviceGroupInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 BindImageMemoryDeviceGroupInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BindImageMemoryDeviceGroupInfo & setDeviceIndexCount( uint32_t deviceIndexCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
deviceIndexCount = deviceIndexCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BindImageMemoryDeviceGroupInfo & setPDeviceIndices( const uint32_t * pDeviceIndices_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pDeviceIndices = pDeviceIndices_;
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
BindImageMemoryDeviceGroupInfo &
|
|
setDeviceIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & deviceIndices_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
deviceIndexCount = static_cast<uint32_t>( deviceIndices_.size() );
|
|
pDeviceIndices = deviceIndices_.data();
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BindImageMemoryDeviceGroupInfo & setSplitInstanceBindRegionCount( uint32_t splitInstanceBindRegionCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
splitInstanceBindRegionCount = splitInstanceBindRegionCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BindImageMemoryDeviceGroupInfo &
|
|
setPSplitInstanceBindRegions( const VULKAN_HPP_NAMESPACE::Rect2D * pSplitInstanceBindRegions_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pSplitInstanceBindRegions = pSplitInstanceBindRegions_;
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
BindImageMemoryDeviceGroupInfo & setSplitInstanceBindRegions(
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & splitInstanceBindRegions_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
splitInstanceBindRegionCount = static_cast<uint32_t>( splitInstanceBindRegions_.size() );
|
|
pSplitInstanceBindRegions = splitInstanceBindRegions_.data();
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkBindImageMemoryDeviceGroupInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkBindImageMemoryDeviceGroupInfo *>( this );
|
|
}
|
|
|
|
operator VkBindImageMemoryDeviceGroupInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkBindImageMemoryDeviceGroupInfo *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
const void * const &,
|
|
uint32_t const &,
|
|
const uint32_t * const &,
|
|
uint32_t const &,
|
|
const VULKAN_HPP_NAMESPACE::Rect2D * const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, deviceIndexCount, pDeviceIndices, splitInstanceBindRegionCount, pSplitInstanceBindRegions );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( BindImageMemoryDeviceGroupInfo const & ) const = default;
|
|
#else
|
|
bool operator==( BindImageMemoryDeviceGroupInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceIndexCount == rhs.deviceIndexCount ) && ( pDeviceIndices == rhs.pDeviceIndices ) &&
|
|
( splitInstanceBindRegionCount == rhs.splitInstanceBindRegionCount ) && ( pSplitInstanceBindRegions == rhs.pSplitInstanceBindRegions );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( BindImageMemoryDeviceGroupInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindImageMemoryDeviceGroupInfo;
|
|
const void * pNext = {};
|
|
uint32_t deviceIndexCount = {};
|
|
const uint32_t * pDeviceIndices = {};
|
|
uint32_t splitInstanceBindRegionCount = {};
|
|
const VULKAN_HPP_NAMESPACE::Rect2D * pSplitInstanceBindRegions = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eBindImageMemoryDeviceGroupInfo>
|
|
{
|
|
using Type = BindImageMemoryDeviceGroupInfo;
|
|
};
|
|
|
|
using BindImageMemoryDeviceGroupInfoKHR = BindImageMemoryDeviceGroupInfo;
|
|
|
|
struct BindImageMemoryInfo
|
|
{
|
|
using NativeType = VkBindImageMemoryInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindImageMemoryInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR BindImageMemoryInfo( VULKAN_HPP_NAMESPACE::Image image_ = {},
|
|
VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {},
|
|
VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, image( image_ )
|
|
, memory( memory_ )
|
|
, memoryOffset( memoryOffset_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR BindImageMemoryInfo( BindImageMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
BindImageMemoryInfo( VkBindImageMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT : BindImageMemoryInfo( *reinterpret_cast<BindImageMemoryInfo const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
BindImageMemoryInfo & operator=( BindImageMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
BindImageMemoryInfo & operator=( VkBindImageMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindImageMemoryInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 BindImageMemoryInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BindImageMemoryInfo & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
image = image_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BindImageMemoryInfo & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
memory = memory_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BindImageMemoryInfo & setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
memoryOffset = memoryOffset_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkBindImageMemoryInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkBindImageMemoryInfo *>( this );
|
|
}
|
|
|
|
operator VkBindImageMemoryInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkBindImageMemoryInfo *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
const void * const &,
|
|
VULKAN_HPP_NAMESPACE::Image const &,
|
|
VULKAN_HPP_NAMESPACE::DeviceMemory const &,
|
|
VULKAN_HPP_NAMESPACE::DeviceSize const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, image, memory, memoryOffset );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( BindImageMemoryInfo const & ) const = default;
|
|
#else
|
|
bool operator==( BindImageMemoryInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( image == rhs.image ) && ( memory == rhs.memory ) && ( memoryOffset == rhs.memoryOffset );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( BindImageMemoryInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindImageMemoryInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Image image = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eBindImageMemoryInfo>
|
|
{
|
|
using Type = BindImageMemoryInfo;
|
|
};
|
|
|
|
using BindImageMemoryInfoKHR = BindImageMemoryInfo;
|
|
|
|
struct BindImageMemorySwapchainInfoKHR
|
|
{
|
|
using NativeType = VkBindImageMemorySwapchainInfoKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindImageMemorySwapchainInfoKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR BindImageMemorySwapchainInfoKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ = {},
|
|
uint32_t imageIndex_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, swapchain( swapchain_ )
|
|
, imageIndex( imageIndex_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR BindImageMemorySwapchainInfoKHR( BindImageMemorySwapchainInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
BindImageMemorySwapchainInfoKHR( VkBindImageMemorySwapchainInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: BindImageMemorySwapchainInfoKHR( *reinterpret_cast<BindImageMemorySwapchainInfoKHR const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
BindImageMemorySwapchainInfoKHR & operator=( BindImageMemorySwapchainInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
BindImageMemorySwapchainInfoKHR & operator=( VkBindImageMemorySwapchainInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindImageMemorySwapchainInfoKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 BindImageMemorySwapchainInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BindImageMemorySwapchainInfoKHR & setSwapchain( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
swapchain = swapchain_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BindImageMemorySwapchainInfoKHR & setImageIndex( uint32_t imageIndex_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
imageIndex = imageIndex_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkBindImageMemorySwapchainInfoKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkBindImageMemorySwapchainInfoKHR *>( this );
|
|
}
|
|
|
|
operator VkBindImageMemorySwapchainInfoKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkBindImageMemorySwapchainInfoKHR *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::SwapchainKHR const &, uint32_t const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, swapchain, imageIndex );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( BindImageMemorySwapchainInfoKHR const & ) const = default;
|
|
#else
|
|
bool operator==( BindImageMemorySwapchainInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchain == rhs.swapchain ) && ( imageIndex == rhs.imageIndex );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( BindImageMemorySwapchainInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindImageMemorySwapchainInfoKHR;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain = {};
|
|
uint32_t imageIndex = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eBindImageMemorySwapchainInfoKHR>
|
|
{
|
|
using Type = BindImageMemorySwapchainInfoKHR;
|
|
};
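  // Illustrative usage sketch -- not part of the generated header. When binding an image
  // to swapchain-owned memory (VK_KHR_swapchain), a BindImageMemorySwapchainInfoKHR is
  // chained into the pNext of a BindImageMemoryInfo and the DeviceMemory handle is left
  // null. The snippet assumes the default `vk` namespace alias and pre-existing `device`,
  // `image`, `swapchain` and `imageIndex` values; all names are placeholders.
  //
  //   vk::BindImageMemorySwapchainInfoKHR swapchainBindInfo( swapchain, imageIndex );
  //   vk::BindImageMemoryInfo bindInfo( image, {}, 0, &swapchainBindInfo );
  //   device.bindImageMemory2( bindInfo );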
|
|
|
|
struct BindImagePlaneMemoryInfo
|
|
{
|
|
using NativeType = VkBindImagePlaneMemoryInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindImagePlaneMemoryInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR BindImagePlaneMemoryInfo( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor,
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, planeAspect( planeAspect_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR BindImagePlaneMemoryInfo( BindImagePlaneMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
BindImagePlaneMemoryInfo( VkBindImagePlaneMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: BindImagePlaneMemoryInfo( *reinterpret_cast<BindImagePlaneMemoryInfo const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
BindImagePlaneMemoryInfo & operator=( BindImagePlaneMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
BindImagePlaneMemoryInfo & operator=( VkBindImagePlaneMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindImagePlaneMemoryInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 BindImagePlaneMemoryInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BindImagePlaneMemoryInfo & setPlaneAspect( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
planeAspect = planeAspect_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkBindImagePlaneMemoryInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkBindImagePlaneMemoryInfo *>( this );
|
|
}
|
|
|
|
operator VkBindImagePlaneMemoryInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkBindImagePlaneMemoryInfo *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ImageAspectFlagBits const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, planeAspect );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( BindImagePlaneMemoryInfo const & ) const = default;
|
|
#else
|
|
bool operator==( BindImagePlaneMemoryInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( planeAspect == rhs.planeAspect );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( BindImagePlaneMemoryInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindImagePlaneMemoryInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor;
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eBindImagePlaneMemoryInfo>
|
|
{
|
|
using Type = BindImagePlaneMemoryInfo;
|
|
};
|
|
|
|
using BindImagePlaneMemoryInfoKHR = BindImagePlaneMemoryInfo;
|
|
|
|
struct ImageSubresourceLayers
|
|
{
|
|
using NativeType = VkImageSubresourceLayers;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ImageSubresourceLayers( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {},
|
|
uint32_t mipLevel_ = {},
|
|
uint32_t baseArrayLayer_ = {},
|
|
uint32_t layerCount_ = {} ) VULKAN_HPP_NOEXCEPT
|
|
: aspectMask( aspectMask_ )
|
|
, mipLevel( mipLevel_ )
|
|
, baseArrayLayer( baseArrayLayer_ )
|
|
, layerCount( layerCount_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR ImageSubresourceLayers( ImageSubresourceLayers const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImageSubresourceLayers( VkImageSubresourceLayers const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ImageSubresourceLayers( *reinterpret_cast<ImageSubresourceLayers const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
ImageSubresourceLayers & operator=( ImageSubresourceLayers const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ImageSubresourceLayers & operator=( VkImageSubresourceLayers const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ImageSubresourceLayers & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
aspectMask = aspectMask_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageSubresourceLayers & setMipLevel( uint32_t mipLevel_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
mipLevel = mipLevel_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageSubresourceLayers & setBaseArrayLayer( uint32_t baseArrayLayer_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
baseArrayLayer = baseArrayLayer_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageSubresourceLayers & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
layerCount = layerCount_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkImageSubresourceLayers const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkImageSubresourceLayers *>( this );
|
|
}
|
|
|
|
operator VkImageSubresourceLayers &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkImageSubresourceLayers *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::ImageAspectFlags const &, uint32_t const &, uint32_t const &, uint32_t const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( aspectMask, mipLevel, baseArrayLayer, layerCount );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( ImageSubresourceLayers const & ) const = default;
|
|
#else
|
|
bool operator==( ImageSubresourceLayers const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( aspectMask == rhs.aspectMask ) && ( mipLevel == rhs.mipLevel ) && ( baseArrayLayer == rhs.baseArrayLayer ) && ( layerCount == rhs.layerCount );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( ImageSubresourceLayers const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {};
|
|
uint32_t mipLevel = {};
|
|
uint32_t baseArrayLayer = {};
|
|
uint32_t layerCount = {};
|
|
};
|
|
|
|
struct Offset3D
|
|
{
|
|
using NativeType = VkOffset3D;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR Offset3D( int32_t x_ = {}, int32_t y_ = {}, int32_t z_ = {} ) VULKAN_HPP_NOEXCEPT
|
|
: x( x_ )
|
|
, y( y_ )
|
|
, z( z_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR Offset3D( Offset3D const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
Offset3D( VkOffset3D const & rhs ) VULKAN_HPP_NOEXCEPT : Offset3D( *reinterpret_cast<Offset3D const *>( &rhs ) ) {}
|
|
|
|
explicit Offset3D( Offset2D const & offset2D, int32_t z_ = {} ) : x( offset2D.x ), y( offset2D.y ), z( z_ ) {}
|
|
|
|
Offset3D & operator=( Offset3D const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
Offset3D & operator=( VkOffset3D const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Offset3D const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 Offset3D & setX( int32_t x_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
x = x_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 Offset3D & setY( int32_t y_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
y = y_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 Offset3D & setZ( int32_t z_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
z = z_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkOffset3D const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkOffset3D *>( this );
|
|
}
|
|
|
|
operator VkOffset3D &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkOffset3D *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<int32_t const &, int32_t const &, int32_t const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( x, y, z );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( Offset3D const & ) const = default;
|
|
#else
|
|
bool operator==( Offset3D const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( x == rhs.x ) && ( y == rhs.y ) && ( z == rhs.z );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( Offset3D const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
int32_t x = {};
|
|
int32_t y = {};
|
|
int32_t z = {};
|
|
};
|
|
|
|
struct ImageBlit2
|
|
{
|
|
using NativeType = VkImageBlit2;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageBlit2;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR_14 ImageBlit2( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {},
|
|
std::array<VULKAN_HPP_NAMESPACE::Offset3D, 2> const & srcOffsets_ = {},
|
|
VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {},
|
|
std::array<VULKAN_HPP_NAMESPACE::Offset3D, 2> const & dstOffsets_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , srcSubresource( srcSubresource_ )
      , srcOffsets( srcOffsets_ )
      , dstSubresource( dstSubresource_ )
      , dstOffsets( dstOffsets_ )
    {
    }

    VULKAN_HPP_CONSTEXPR_14 ImageBlit2( ImageBlit2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ImageBlit2( VkImageBlit2 const & rhs ) VULKAN_HPP_NOEXCEPT : ImageBlit2( *reinterpret_cast<ImageBlit2 const *>( &rhs ) ) {}

    ImageBlit2 & operator=( ImageBlit2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    ImageBlit2 & operator=( VkImageBlit2 const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageBlit2 const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 ImageBlit2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageBlit2 & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT
    {
      srcSubresource = srcSubresource_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageBlit2 & setSrcOffsets( std::array<VULKAN_HPP_NAMESPACE::Offset3D, 2> const & srcOffsets_ ) VULKAN_HPP_NOEXCEPT
    {
      srcOffsets = srcOffsets_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageBlit2 & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT
    {
      dstSubresource = dstSubresource_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageBlit2 & setDstOffsets( std::array<VULKAN_HPP_NAMESPACE::Offset3D, 2> const & dstOffsets_ ) VULKAN_HPP_NOEXCEPT
    {
      dstOffsets = dstOffsets_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkImageBlit2 const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImageBlit2 *>( this );
    }

    operator VkImageBlit2 &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImageBlit2 *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
               const void * const &,
               VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const &,
               VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::Offset3D, 2> const &,
               VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const &,
               VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::Offset3D, 2> const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, srcSubresource, srcOffsets, dstSubresource, dstOffsets );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ImageBlit2 const & ) const = default;
#else
    bool operator==( ImageBlit2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
# if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
# else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcSubresource == rhs.srcSubresource ) && ( srcOffsets == rhs.srcOffsets ) &&
             ( dstSubresource == rhs.dstSubresource ) && ( dstOffsets == rhs.dstOffsets );
# endif
    }

    bool operator!=( ImageBlit2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageBlit2;
    const void * pNext = {};
    VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {};
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::Offset3D, 2> srcOffsets = {};
    VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {};
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::Offset3D, 2> dstOffsets = {};
  };

  template <>
  struct CppType<StructureType, StructureType::eImageBlit2>
  {
    using Type = ImageBlit2;
  };

using ImageBlit2KHR = ImageBlit2;

  struct BlitImageInfo2
  {
    using NativeType = VkBlitImageInfo2;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBlitImageInfo2;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2( VULKAN_HPP_NAMESPACE::Image srcImage_ = {},
                                            VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
                                            VULKAN_HPP_NAMESPACE::Image dstImage_ = {},
                                            VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
                                            uint32_t regionCount_ = {},
                                            const VULKAN_HPP_NAMESPACE::ImageBlit2 * pRegions_ = {},
                                            VULKAN_HPP_NAMESPACE::Filter filter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest,
                                            const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , srcImage( srcImage_ )
      , srcImageLayout( srcImageLayout_ )
      , dstImage( dstImage_ )
      , dstImageLayout( dstImageLayout_ )
      , regionCount( regionCount_ )
      , pRegions( pRegions_ )
      , filter( filter_ )
    {
    }

    VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2( BlitImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    BlitImageInfo2( VkBlitImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT : BlitImageInfo2( *reinterpret_cast<BlitImageInfo2 const *>( &rhs ) ) {}

# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    BlitImageInfo2( VULKAN_HPP_NAMESPACE::Image srcImage_,
                    VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_,
                    VULKAN_HPP_NAMESPACE::Image dstImage_,
                    VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_,
                    VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageBlit2> const & regions_,
                    VULKAN_HPP_NAMESPACE::Filter filter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest,
                    const void * pNext_ = nullptr )
      : pNext( pNext_ )
      , srcImage( srcImage_ )
      , srcImageLayout( srcImageLayout_ )
      , dstImage( dstImage_ )
      , dstImageLayout( dstImageLayout_ )
      , regionCount( static_cast<uint32_t>( regions_.size() ) )
      , pRegions( regions_.data() )
      , filter( filter_ )
    {
    }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    BlitImageInfo2 & operator=( BlitImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    BlitImageInfo2 & operator=( VkBlitImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BlitImageInfo2 const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2 & setSrcImage( VULKAN_HPP_NAMESPACE::Image srcImage_ ) VULKAN_HPP_NOEXCEPT
    {
      srcImage = srcImage_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2 & setSrcImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT
    {
      srcImageLayout = srcImageLayout_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2 & setDstImage( VULKAN_HPP_NAMESPACE::Image dstImage_ ) VULKAN_HPP_NOEXCEPT
    {
      dstImage = dstImage_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2 & setDstImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT
    {
      dstImageLayout = dstImageLayout_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2 & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT
    {
      regionCount = regionCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2 & setPRegions( const VULKAN_HPP_NAMESPACE::ImageBlit2 * pRegions_ ) VULKAN_HPP_NOEXCEPT
    {
      pRegions = pRegions_;
      return *this;
    }

# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    BlitImageInfo2 & setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageBlit2> const & regions_ ) VULKAN_HPP_NOEXCEPT
    {
      regionCount = static_cast<uint32_t>( regions_.size() );
      pRegions = regions_.data();
      return *this;
    }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2 & setFilter( VULKAN_HPP_NAMESPACE::Filter filter_ ) VULKAN_HPP_NOEXCEPT
    {
      filter = filter_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkBlitImageInfo2 const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkBlitImageInfo2 *>( this );
    }

    operator VkBlitImageInfo2 &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkBlitImageInfo2 *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
               const void * const &,
               VULKAN_HPP_NAMESPACE::Image const &,
               VULKAN_HPP_NAMESPACE::ImageLayout const &,
               VULKAN_HPP_NAMESPACE::Image const &,
               VULKAN_HPP_NAMESPACE::ImageLayout const &,
               uint32_t const &,
               const VULKAN_HPP_NAMESPACE::ImageBlit2 * const &,
               VULKAN_HPP_NAMESPACE::Filter const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions, filter );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( BlitImageInfo2 const & ) const = default;
#else
    bool operator==( BlitImageInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
# if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
# else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcImage == rhs.srcImage ) && ( srcImageLayout == rhs.srcImageLayout ) &&
             ( dstImage == rhs.dstImage ) && ( dstImageLayout == rhs.dstImageLayout ) && ( regionCount == rhs.regionCount ) && ( pRegions == rhs.pRegions ) &&
             ( filter == rhs.filter );
# endif
    }

    bool operator!=( BlitImageInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBlitImageInfo2;
    const void * pNext = {};
    VULKAN_HPP_NAMESPACE::Image srcImage = {};
    VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
    VULKAN_HPP_NAMESPACE::Image dstImage = {};
    VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
    uint32_t regionCount = {};
    const VULKAN_HPP_NAMESPACE::ImageBlit2 * pRegions = {};
    VULKAN_HPP_NAMESPACE::Filter filter = VULKAN_HPP_NAMESPACE::Filter::eNearest;
  };

  template <>
  struct CppType<StructureType, StructureType::eBlitImageInfo2>
  {
    using Type = BlitImageInfo2;
  };

using BlitImageInfo2KHR = BlitImageInfo2;
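
  // Illustrative usage sketch for the two structs above. The command buffer "cmd" and the images are
  // assumptions, and CommandBuffer::blitImage2 is declared elsewhere in vulkan.hpp (it requires
  // Vulkan 1.3 or VK_KHR_copy_commands2); this is not part of the generated declarations.
  //
  //   VULKAN_HPP_NAMESPACE::ImageBlit2 region{};
  //   region.setSrcSubresource( { VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor, 0, 0, 1 } )
  //         .setSrcOffsets( { VULKAN_HPP_NAMESPACE::Offset3D{ 0, 0, 0 }, VULKAN_HPP_NAMESPACE::Offset3D{ 1024, 1024, 1 } } )
  //         .setDstSubresource( { VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor, 0, 0, 1 } )
  //         .setDstOffsets( { VULKAN_HPP_NAMESPACE::Offset3D{ 0, 0, 0 }, VULKAN_HPP_NAMESPACE::Offset3D{ 512, 512, 1 } } );
  //   VULKAN_HPP_NAMESPACE::BlitImageInfo2 blitInfo{ srcImage,
  //                                                  VULKAN_HPP_NAMESPACE::ImageLayout::eTransferSrcOptimal,
  //                                                  dstImage,
  //                                                  VULKAN_HPP_NAMESPACE::ImageLayout::eTransferDstOptimal,
  //                                                  region,
  //                                                  VULKAN_HPP_NAMESPACE::Filter::eLinear };
  //   cmd.blitImage2( blitInfo );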

  struct BufferCopy
  {
    using NativeType = VkBufferCopy;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR BufferCopy( VULKAN_HPP_NAMESPACE::DeviceSize srcOffset_ = {},
                                     VULKAN_HPP_NAMESPACE::DeviceSize dstOffset_ = {},
                                     VULKAN_HPP_NAMESPACE::DeviceSize size_ = {} ) VULKAN_HPP_NOEXCEPT
      : srcOffset( srcOffset_ )
      , dstOffset( dstOffset_ )
      , size( size_ )
    {
    }

    VULKAN_HPP_CONSTEXPR BufferCopy( BufferCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    BufferCopy( VkBufferCopy const & rhs ) VULKAN_HPP_NOEXCEPT : BufferCopy( *reinterpret_cast<BufferCopy const *>( &rhs ) ) {}

    BufferCopy & operator=( BufferCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    BufferCopy & operator=( VkBufferCopy const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferCopy const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 BufferCopy & setSrcOffset( VULKAN_HPP_NAMESPACE::DeviceSize srcOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      srcOffset = srcOffset_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BufferCopy & setDstOffset( VULKAN_HPP_NAMESPACE::DeviceSize dstOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      dstOffset = dstOffset_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BufferCopy & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
    {
      size = size_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkBufferCopy const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkBufferCopy *>( this );
    }

    operator VkBufferCopy &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkBufferCopy *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( srcOffset, dstOffset, size );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( BufferCopy const & ) const = default;
#else
    bool operator==( BufferCopy const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
# if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
# else
      return ( srcOffset == rhs.srcOffset ) && ( dstOffset == rhs.dstOffset ) && ( size == rhs.size );
# endif
    }

    bool operator!=( BufferCopy const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::DeviceSize srcOffset = {};
    VULKAN_HPP_NAMESPACE::DeviceSize dstOffset = {};
    VULKAN_HPP_NAMESPACE::DeviceSize size = {};
  };

struct BufferCopy2
  {
    using NativeType = VkBufferCopy2;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferCopy2;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR BufferCopy2( VULKAN_HPP_NAMESPACE::DeviceSize srcOffset_ = {},
                                      VULKAN_HPP_NAMESPACE::DeviceSize dstOffset_ = {},
                                      VULKAN_HPP_NAMESPACE::DeviceSize size_ = {},
                                      const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , srcOffset( srcOffset_ )
      , dstOffset( dstOffset_ )
      , size( size_ )
    {
    }

    VULKAN_HPP_CONSTEXPR BufferCopy2( BufferCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    BufferCopy2( VkBufferCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT : BufferCopy2( *reinterpret_cast<BufferCopy2 const *>( &rhs ) ) {}

    BufferCopy2 & operator=( BufferCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    BufferCopy2 & operator=( VkBufferCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferCopy2 const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 BufferCopy2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BufferCopy2 & setSrcOffset( VULKAN_HPP_NAMESPACE::DeviceSize srcOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      srcOffset = srcOffset_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BufferCopy2 & setDstOffset( VULKAN_HPP_NAMESPACE::DeviceSize dstOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      dstOffset = dstOffset_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BufferCopy2 & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
    {
      size = size_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkBufferCopy2 const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkBufferCopy2 *>( this );
    }

    operator VkBufferCopy2 &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkBufferCopy2 *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
               const void * const &,
               VULKAN_HPP_NAMESPACE::DeviceSize const &,
               VULKAN_HPP_NAMESPACE::DeviceSize const &,
               VULKAN_HPP_NAMESPACE::DeviceSize const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, srcOffset, dstOffset, size );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( BufferCopy2 const & ) const = default;
#else
    bool operator==( BufferCopy2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
# if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
# else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcOffset == rhs.srcOffset ) && ( dstOffset == rhs.dstOffset ) && ( size == rhs.size );
# endif
    }

    bool operator!=( BufferCopy2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferCopy2;
    const void * pNext = {};
    VULKAN_HPP_NAMESPACE::DeviceSize srcOffset = {};
    VULKAN_HPP_NAMESPACE::DeviceSize dstOffset = {};
    VULKAN_HPP_NAMESPACE::DeviceSize size = {};
  };

  template <>
  struct CppType<StructureType, StructureType::eBufferCopy2>
  {
    using Type = BufferCopy2;
  };

using BufferCopy2KHR = BufferCopy2;
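
  // Illustrative usage sketch: a BufferCopy2 region fed to the copy-commands-2 path. "cmd", "srcBuffer"
  // and "dstBuffer" are assumptions, and CopyBufferInfo2 / CommandBuffer::copyBuffer2 are declared
  // elsewhere in vulkan.hpp (Vulkan 1.3 or VK_KHR_copy_commands2); this is not generated code.
  //
  //   VULKAN_HPP_NAMESPACE::BufferCopy2 copyRegion{ 0, 0, 256 };  // srcOffset, dstOffset, size
  //   VULKAN_HPP_NAMESPACE::CopyBufferInfo2 copyInfo{ srcBuffer, dstBuffer, copyRegion };
  //   cmd.copyBuffer2( copyInfo );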
|
|
|
|
struct BufferCreateInfo
|
|
{
|
|
using NativeType = VkBufferCreateInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferCreateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR BufferCreateInfo( VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_ = {},
|
|
VULKAN_HPP_NAMESPACE::DeviceSize size_ = {},
|
|
VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ = {},
|
|
VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive,
|
|
uint32_t queueFamilyIndexCount_ = {},
|
|
const uint32_t * pQueueFamilyIndices_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, flags( flags_ )
|
|
, size( size_ )
|
|
, usage( usage_ )
|
|
, sharingMode( sharingMode_ )
|
|
, queueFamilyIndexCount( queueFamilyIndexCount_ )
|
|
, pQueueFamilyIndices( pQueueFamilyIndices_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR BufferCreateInfo( BufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
BufferCreateInfo( VkBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : BufferCreateInfo( *reinterpret_cast<BufferCreateInfo const *>( &rhs ) ) {}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
BufferCreateInfo( VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_,
|
|
VULKAN_HPP_NAMESPACE::DeviceSize size_,
|
|
VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_,
|
|
VULKAN_HPP_NAMESPACE::SharingMode sharingMode_,
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & queueFamilyIndices_,
|
|
const void * pNext_ = nullptr )
|
|
: pNext( pNext_ )
|
|
, flags( flags_ )
|
|
, size( size_ )
|
|
, usage( usage_ )
|
|
, sharingMode( sharingMode_ )
|
|
, queueFamilyIndexCount( static_cast<uint32_t>( queueFamilyIndices_.size() ) )
|
|
, pQueueFamilyIndices( queueFamilyIndices_.data() )
|
|
{
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
BufferCreateInfo & operator=( BufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
BufferCreateInfo & operator=( VkBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferCreateInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 BufferCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferCreateInfo & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
size = size_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferCreateInfo & setUsage( VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
usage = usage_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferCreateInfo & setSharingMode( VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
sharingMode = sharingMode_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferCreateInfo & setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
queueFamilyIndexCount = queueFamilyIndexCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferCreateInfo & setPQueueFamilyIndices( const uint32_t * pQueueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pQueueFamilyIndices = pQueueFamilyIndices_;
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
BufferCreateInfo & setQueueFamilyIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & queueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
queueFamilyIndexCount = static_cast<uint32_t>( queueFamilyIndices_.size() );
|
|
pQueueFamilyIndices = queueFamilyIndices_.data();
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkBufferCreateInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkBufferCreateInfo *>( this );
|
|
}
|
|
|
|
operator VkBufferCreateInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkBufferCreateInfo *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
const void * const &,
|
|
VULKAN_HPP_NAMESPACE::BufferCreateFlags const &,
|
|
VULKAN_HPP_NAMESPACE::DeviceSize const &,
|
|
VULKAN_HPP_NAMESPACE::BufferUsageFlags const &,
|
|
VULKAN_HPP_NAMESPACE::SharingMode const &,
|
|
uint32_t const &,
|
|
const uint32_t * const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags, size, usage, sharingMode, queueFamilyIndexCount, pQueueFamilyIndices );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( BufferCreateInfo const & ) const = default;
|
|
#else
|
|
bool operator==( BufferCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( size == rhs.size ) && ( usage == rhs.usage ) &&
|
|
( sharingMode == rhs.sharingMode ) && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount ) && ( pQueueFamilyIndices == rhs.pQueueFamilyIndices );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( BufferCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferCreateInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::BufferCreateFlags flags = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize size = {};
|
|
VULKAN_HPP_NAMESPACE::BufferUsageFlags usage = {};
|
|
VULKAN_HPP_NAMESPACE::SharingMode sharingMode = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive;
|
|
uint32_t queueFamilyIndexCount = {};
|
|
const uint32_t * pQueueFamilyIndices = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eBufferCreateInfo>
|
|
{
|
|
using Type = BufferCreateInfo;
|
|
};
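
  // Illustrative usage sketch: filling BufferCreateInfo with the setters above and creating the buffer.
  // "device" is an assumption, and Device::createBuffer is declared elsewhere in vulkan.hpp; this is not
  // part of the generated declarations.
  //
  //   VULKAN_HPP_NAMESPACE::BufferCreateInfo bufferInfo{};
  //   bufferInfo.setSize( 65536 )
  //             .setUsage( VULKAN_HPP_NAMESPACE::BufferUsageFlagBits::eStorageBuffer | VULKAN_HPP_NAMESPACE::BufferUsageFlagBits::eTransferDst )
  //             .setSharingMode( VULKAN_HPP_NAMESPACE::SharingMode::eExclusive );
  //   VULKAN_HPP_NAMESPACE::Buffer buffer = device.createBuffer( bufferInfo );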
|
|
|
|
struct BufferDeviceAddressInfo
|
|
{
|
|
using NativeType = VkBufferDeviceAddressInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferDeviceAddressInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR BufferDeviceAddressInfo( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, buffer( buffer_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR BufferDeviceAddressInfo( BufferDeviceAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
BufferDeviceAddressInfo( VkBufferDeviceAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: BufferDeviceAddressInfo( *reinterpret_cast<BufferDeviceAddressInfo const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
BufferDeviceAddressInfo & operator=( BufferDeviceAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
BufferDeviceAddressInfo & operator=( VkBufferDeviceAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 BufferDeviceAddressInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferDeviceAddressInfo & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
buffer = buffer_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkBufferDeviceAddressInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkBufferDeviceAddressInfo *>( this );
|
|
}
|
|
|
|
operator VkBufferDeviceAddressInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkBufferDeviceAddressInfo *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Buffer const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, buffer );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( BufferDeviceAddressInfo const & ) const = default;
|
|
#else
|
|
bool operator==( BufferDeviceAddressInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( buffer == rhs.buffer );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( BufferDeviceAddressInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferDeviceAddressInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Buffer buffer = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eBufferDeviceAddressInfo>
|
|
{
|
|
using Type = BufferDeviceAddressInfo;
|
|
};
|
|
|
|
using BufferDeviceAddressInfoEXT = BufferDeviceAddressInfo;
|
|
using BufferDeviceAddressInfoKHR = BufferDeviceAddressInfo;
|
|
|
|
struct Extent3D
|
|
{
|
|
using NativeType = VkExtent3D;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR Extent3D( uint32_t width_ = {}, uint32_t height_ = {}, uint32_t depth_ = {} ) VULKAN_HPP_NOEXCEPT
|
|
: width( width_ )
|
|
, height( height_ )
|
|
, depth( depth_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR Extent3D( Extent3D const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
Extent3D( VkExtent3D const & rhs ) VULKAN_HPP_NOEXCEPT : Extent3D( *reinterpret_cast<Extent3D const *>( &rhs ) ) {}
|
|
|
|
explicit Extent3D( Extent2D const & extent2D, uint32_t depth_ = {} ) : width( extent2D.width ), height( extent2D.height ), depth( depth_ ) {}
|
|
|
|
Extent3D & operator=( Extent3D const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
Extent3D & operator=( VkExtent3D const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Extent3D const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 Extent3D & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
width = width_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 Extent3D & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
height = height_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 Extent3D & setDepth( uint32_t depth_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
depth = depth_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkExtent3D const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkExtent3D *>( this );
|
|
}
|
|
|
|
operator VkExtent3D &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkExtent3D *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<uint32_t const &, uint32_t const &, uint32_t const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( width, height, depth );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( Extent3D const & ) const = default;
|
|
#else
|
|
bool operator==( Extent3D const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( width == rhs.width ) && ( height == rhs.height ) && ( depth == rhs.depth );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( Extent3D const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
uint32_t width = {};
|
|
uint32_t height = {};
|
|
uint32_t depth = {};
|
|
};
|
|
|
|
struct BufferImageCopy
|
|
{
|
|
using NativeType = VkBufferImageCopy;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR BufferImageCopy( VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset_ = {},
|
|
uint32_t bufferRowLength_ = {},
|
|
uint32_t bufferImageHeight_ = {},
|
|
VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource_ = {},
|
|
VULKAN_HPP_NAMESPACE::Offset3D imageOffset_ = {},
|
|
VULKAN_HPP_NAMESPACE::Extent3D imageExtent_ = {} ) VULKAN_HPP_NOEXCEPT
|
|
: bufferOffset( bufferOffset_ )
|
|
, bufferRowLength( bufferRowLength_ )
|
|
, bufferImageHeight( bufferImageHeight_ )
|
|
, imageSubresource( imageSubresource_ )
|
|
, imageOffset( imageOffset_ )
|
|
, imageExtent( imageExtent_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR BufferImageCopy( BufferImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
BufferImageCopy( VkBufferImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT : BufferImageCopy( *reinterpret_cast<BufferImageCopy const *>( &rhs ) ) {}
|
|
|
|
BufferImageCopy & operator=( BufferImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
BufferImageCopy & operator=( VkBufferImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferImageCopy const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 BufferImageCopy & setBufferOffset( VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
bufferOffset = bufferOffset_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferImageCopy & setBufferRowLength( uint32_t bufferRowLength_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
bufferRowLength = bufferRowLength_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferImageCopy & setBufferImageHeight( uint32_t bufferImageHeight_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
bufferImageHeight = bufferImageHeight_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferImageCopy & setImageSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & imageSubresource_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
imageSubresource = imageSubresource_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferImageCopy & setImageOffset( VULKAN_HPP_NAMESPACE::Offset3D const & imageOffset_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
imageOffset = imageOffset_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferImageCopy & setImageExtent( VULKAN_HPP_NAMESPACE::Extent3D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
imageExtent = imageExtent_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkBufferImageCopy const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkBufferImageCopy *>( this );
|
|
}
|
|
|
|
operator VkBufferImageCopy &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkBufferImageCopy *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::DeviceSize const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const &,
|
|
VULKAN_HPP_NAMESPACE::Offset3D const &,
|
|
VULKAN_HPP_NAMESPACE::Extent3D const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( bufferOffset, bufferRowLength, bufferImageHeight, imageSubresource, imageOffset, imageExtent );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( BufferImageCopy const & ) const = default;
|
|
#else
|
|
bool operator==( BufferImageCopy const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( bufferOffset == rhs.bufferOffset ) && ( bufferRowLength == rhs.bufferRowLength ) && ( bufferImageHeight == rhs.bufferImageHeight ) &&
|
|
( imageSubresource == rhs.imageSubresource ) && ( imageOffset == rhs.imageOffset ) && ( imageExtent == rhs.imageExtent );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( BufferImageCopy const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset = {};
|
|
uint32_t bufferRowLength = {};
|
|
uint32_t bufferImageHeight = {};
|
|
VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource = {};
|
|
VULKAN_HPP_NAMESPACE::Offset3D imageOffset = {};
|
|
VULKAN_HPP_NAMESPACE::Extent3D imageExtent = {};
|
|
};
|
|
|
|
struct BufferImageCopy2
|
|
{
|
|
using NativeType = VkBufferImageCopy2;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferImageCopy2;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR BufferImageCopy2( VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset_ = {},
|
|
uint32_t bufferRowLength_ = {},
|
|
uint32_t bufferImageHeight_ = {},
|
|
VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource_ = {},
|
|
VULKAN_HPP_NAMESPACE::Offset3D imageOffset_ = {},
|
|
VULKAN_HPP_NAMESPACE::Extent3D imageExtent_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, bufferOffset( bufferOffset_ )
|
|
, bufferRowLength( bufferRowLength_ )
|
|
, bufferImageHeight( bufferImageHeight_ )
|
|
, imageSubresource( imageSubresource_ )
|
|
, imageOffset( imageOffset_ )
|
|
, imageExtent( imageExtent_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR BufferImageCopy2( BufferImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
BufferImageCopy2( VkBufferImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT : BufferImageCopy2( *reinterpret_cast<BufferImageCopy2 const *>( &rhs ) ) {}
|
|
|
|
BufferImageCopy2 & operator=( BufferImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
BufferImageCopy2 & operator=( VkBufferImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferImageCopy2 const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 BufferImageCopy2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferImageCopy2 & setBufferOffset( VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
bufferOffset = bufferOffset_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferImageCopy2 & setBufferRowLength( uint32_t bufferRowLength_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
bufferRowLength = bufferRowLength_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferImageCopy2 & setBufferImageHeight( uint32_t bufferImageHeight_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
bufferImageHeight = bufferImageHeight_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferImageCopy2 & setImageSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & imageSubresource_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
imageSubresource = imageSubresource_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferImageCopy2 & setImageOffset( VULKAN_HPP_NAMESPACE::Offset3D const & imageOffset_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
imageOffset = imageOffset_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferImageCopy2 & setImageExtent( VULKAN_HPP_NAMESPACE::Extent3D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
imageExtent = imageExtent_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkBufferImageCopy2 const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkBufferImageCopy2 *>( this );
|
|
}
|
|
|
|
operator VkBufferImageCopy2 &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkBufferImageCopy2 *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
const void * const &,
|
|
VULKAN_HPP_NAMESPACE::DeviceSize const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const &,
|
|
VULKAN_HPP_NAMESPACE::Offset3D const &,
|
|
VULKAN_HPP_NAMESPACE::Extent3D const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, bufferOffset, bufferRowLength, bufferImageHeight, imageSubresource, imageOffset, imageExtent );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( BufferImageCopy2 const & ) const = default;
|
|
#else
|
|
bool operator==( BufferImageCopy2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( bufferOffset == rhs.bufferOffset ) && ( bufferRowLength == rhs.bufferRowLength ) &&
|
|
( bufferImageHeight == rhs.bufferImageHeight ) && ( imageSubresource == rhs.imageSubresource ) && ( imageOffset == rhs.imageOffset ) &&
|
|
( imageExtent == rhs.imageExtent );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( BufferImageCopy2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferImageCopy2;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset = {};
|
|
uint32_t bufferRowLength = {};
|
|
uint32_t bufferImageHeight = {};
|
|
VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource = {};
|
|
VULKAN_HPP_NAMESPACE::Offset3D imageOffset = {};
|
|
VULKAN_HPP_NAMESPACE::Extent3D imageExtent = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eBufferImageCopy2>
|
|
{
|
|
using Type = BufferImageCopy2;
|
|
};
|
|
|
|
using BufferImageCopy2KHR = BufferImageCopy2;
|
|
|
|
struct BufferMemoryBarrier
|
|
{
|
|
using NativeType = VkBufferMemoryBarrier;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferMemoryBarrier;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR BufferMemoryBarrier( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {},
|
|
VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {},
|
|
uint32_t srcQueueFamilyIndex_ = {},
|
|
uint32_t dstQueueFamilyIndex_ = {},
|
|
VULKAN_HPP_NAMESPACE::Buffer buffer_ = {},
|
|
VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {},
|
|
VULKAN_HPP_NAMESPACE::DeviceSize size_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, srcAccessMask( srcAccessMask_ )
|
|
, dstAccessMask( dstAccessMask_ )
|
|
, srcQueueFamilyIndex( srcQueueFamilyIndex_ )
|
|
, dstQueueFamilyIndex( dstQueueFamilyIndex_ )
|
|
, buffer( buffer_ )
|
|
, offset( offset_ )
|
|
, size( size_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR BufferMemoryBarrier( BufferMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
BufferMemoryBarrier( VkBufferMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT : BufferMemoryBarrier( *reinterpret_cast<BufferMemoryBarrier const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
BufferMemoryBarrier & operator=( BufferMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
BufferMemoryBarrier & operator=( VkBufferMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferMemoryBarrier const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
srcAccessMask = srcAccessMask_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dstAccessMask = dstAccessMask_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier & setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
srcQueueFamilyIndex = srcQueueFamilyIndex_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier & setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dstQueueFamilyIndex = dstQueueFamilyIndex_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
buffer = buffer_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
offset = offset_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
size = size_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkBufferMemoryBarrier const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkBufferMemoryBarrier *>( this );
|
|
}
|
|
|
|
operator VkBufferMemoryBarrier &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkBufferMemoryBarrier *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
const void * const &,
|
|
VULKAN_HPP_NAMESPACE::AccessFlags const &,
|
|
VULKAN_HPP_NAMESPACE::AccessFlags const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
VULKAN_HPP_NAMESPACE::Buffer const &,
|
|
VULKAN_HPP_NAMESPACE::DeviceSize const &,
|
|
VULKAN_HPP_NAMESPACE::DeviceSize const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, srcAccessMask, dstAccessMask, srcQueueFamilyIndex, dstQueueFamilyIndex, buffer, offset, size );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( BufferMemoryBarrier const & ) const = default;
|
|
#else
|
|
bool operator==( BufferMemoryBarrier const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcAccessMask == rhs.srcAccessMask ) && ( dstAccessMask == rhs.dstAccessMask ) &&
|
|
( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex ) && ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex ) && ( buffer == rhs.buffer ) &&
|
|
( offset == rhs.offset ) && ( size == rhs.size );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( BufferMemoryBarrier const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferMemoryBarrier;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {};
|
|
VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {};
|
|
uint32_t srcQueueFamilyIndex = {};
|
|
uint32_t dstQueueFamilyIndex = {};
|
|
VULKAN_HPP_NAMESPACE::Buffer buffer = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize size = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eBufferMemoryBarrier>
|
|
{
|
|
using Type = BufferMemoryBarrier;
|
|
};
|
|
|
|
struct BufferMemoryBarrier2
|
|
{
|
|
using NativeType = VkBufferMemoryBarrier2;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferMemoryBarrier2;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR BufferMemoryBarrier2( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask_ = {},
|
|
VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask_ = {},
|
|
VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask_ = {},
|
|
VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask_ = {},
|
|
uint32_t srcQueueFamilyIndex_ = {},
|
|
uint32_t dstQueueFamilyIndex_ = {},
|
|
VULKAN_HPP_NAMESPACE::Buffer buffer_ = {},
|
|
VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {},
|
|
VULKAN_HPP_NAMESPACE::DeviceSize size_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, srcStageMask( srcStageMask_ )
|
|
, srcAccessMask( srcAccessMask_ )
|
|
, dstStageMask( dstStageMask_ )
|
|
, dstAccessMask( dstAccessMask_ )
|
|
, srcQueueFamilyIndex( srcQueueFamilyIndex_ )
|
|
, dstQueueFamilyIndex( dstQueueFamilyIndex_ )
|
|
, buffer( buffer_ )
|
|
, offset( offset_ )
|
|
, size( size_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR BufferMemoryBarrier2( BufferMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
BufferMemoryBarrier2( VkBufferMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: BufferMemoryBarrier2( *reinterpret_cast<BufferMemoryBarrier2 const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
BufferMemoryBarrier2 & operator=( BufferMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
BufferMemoryBarrier2 & operator=( VkBufferMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2 const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
srcStageMask = srcStageMask_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
srcAccessMask = srcAccessMask_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dstStageMask = dstStageMask_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dstAccessMask = dstAccessMask_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
srcQueueFamilyIndex = srcQueueFamilyIndex_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dstQueueFamilyIndex = dstQueueFamilyIndex_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
buffer = buffer_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
offset = offset_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
size = size_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkBufferMemoryBarrier2 const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkBufferMemoryBarrier2 *>( this );
|
|
}
|
|
|
|
operator VkBufferMemoryBarrier2 &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkBufferMemoryBarrier2 *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
const void * const &,
|
|
VULKAN_HPP_NAMESPACE::PipelineStageFlags2 const &,
|
|
VULKAN_HPP_NAMESPACE::AccessFlags2 const &,
|
|
VULKAN_HPP_NAMESPACE::PipelineStageFlags2 const &,
|
|
VULKAN_HPP_NAMESPACE::AccessFlags2 const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
VULKAN_HPP_NAMESPACE::Buffer const &,
|
|
VULKAN_HPP_NAMESPACE::DeviceSize const &,
|
|
VULKAN_HPP_NAMESPACE::DeviceSize const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, srcStageMask, srcAccessMask, dstStageMask, dstAccessMask, srcQueueFamilyIndex, dstQueueFamilyIndex, buffer, offset, size );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( BufferMemoryBarrier2 const & ) const = default;
|
|
#else
|
|
bool operator==( BufferMemoryBarrier2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcStageMask == rhs.srcStageMask ) && ( srcAccessMask == rhs.srcAccessMask ) &&
|
|
( dstStageMask == rhs.dstStageMask ) && ( dstAccessMask == rhs.dstAccessMask ) && ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex ) &&
|
|
( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex ) && ( buffer == rhs.buffer ) && ( offset == rhs.offset ) && ( size == rhs.size );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( BufferMemoryBarrier2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferMemoryBarrier2;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask = {};
|
|
VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask = {};
|
|
VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask = {};
|
|
VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask = {};
|
|
uint32_t srcQueueFamilyIndex = {};
|
|
uint32_t dstQueueFamilyIndex = {};
|
|
VULKAN_HPP_NAMESPACE::Buffer buffer = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize size = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eBufferMemoryBarrier2>
|
|
{
|
|
using Type = BufferMemoryBarrier2;
|
|
};
|
|
|
|
using BufferMemoryBarrier2KHR = BufferMemoryBarrier2;
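
  // Illustrative usage sketch: a synchronization2 buffer barrier recorded through a DependencyInfo.
  // "cmd" and "buffer" are assumptions; DependencyInfo and CommandBuffer::pipelineBarrier2 are declared
  // elsewhere in vulkan.hpp (Vulkan 1.3 or VK_KHR_synchronization2); this is not generated code.
  //
  //   VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2 barrier{};
  //   barrier.setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits2::eTransfer )
  //          .setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlagBits2::eTransferWrite )
  //          .setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits2::eComputeShader )
  //          .setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlagBits2::eShaderRead )
  //          .setSrcQueueFamilyIndex( VK_QUEUE_FAMILY_IGNORED )
  //          .setDstQueueFamilyIndex( VK_QUEUE_FAMILY_IGNORED )
  //          .setBuffer( buffer )
  //          .setSize( VK_WHOLE_SIZE );
  //   VULKAN_HPP_NAMESPACE::DependencyInfo depInfo{};
  //   depInfo.setBufferMemoryBarriers( barrier );
  //   cmd.pipelineBarrier2( depInfo );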
|
|
|
|
struct BufferMemoryRequirementsInfo2
|
|
{
|
|
using NativeType = VkBufferMemoryRequirementsInfo2;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferMemoryRequirementsInfo2;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR BufferMemoryRequirementsInfo2( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, buffer( buffer_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR BufferMemoryRequirementsInfo2( BufferMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
BufferMemoryRequirementsInfo2( VkBufferMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: BufferMemoryRequirementsInfo2( *reinterpret_cast<BufferMemoryRequirementsInfo2 const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
BufferMemoryRequirementsInfo2 & operator=( BufferMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
BufferMemoryRequirementsInfo2 & operator=( VkBufferMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 BufferMemoryRequirementsInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferMemoryRequirementsInfo2 & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
buffer = buffer_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkBufferMemoryRequirementsInfo2 const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( this );
|
|
}
|
|
|
|
operator VkBufferMemoryRequirementsInfo2 &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkBufferMemoryRequirementsInfo2 *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Buffer const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, buffer );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( BufferMemoryRequirementsInfo2 const & ) const = default;
|
|
#else
|
|
bool operator==( BufferMemoryRequirementsInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( buffer == rhs.buffer );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( BufferMemoryRequirementsInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferMemoryRequirementsInfo2;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Buffer buffer = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eBufferMemoryRequirementsInfo2>
|
|
{
|
|
using Type = BufferMemoryRequirementsInfo2;
|
|
};
|
|
|
|
using BufferMemoryRequirementsInfo2KHR = BufferMemoryRequirementsInfo2;
|
|
|
|
struct BufferOpaqueCaptureAddressCreateInfo
|
|
{
|
|
using NativeType = VkBufferOpaqueCaptureAddressCreateInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferOpaqueCaptureAddressCreateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR BufferOpaqueCaptureAddressCreateInfo( uint64_t opaqueCaptureAddress_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, opaqueCaptureAddress( opaqueCaptureAddress_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR BufferOpaqueCaptureAddressCreateInfo( BufferOpaqueCaptureAddressCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
BufferOpaqueCaptureAddressCreateInfo( VkBufferOpaqueCaptureAddressCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: BufferOpaqueCaptureAddressCreateInfo( *reinterpret_cast<BufferOpaqueCaptureAddressCreateInfo const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
BufferOpaqueCaptureAddressCreateInfo & operator=( BufferOpaqueCaptureAddressCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
BufferOpaqueCaptureAddressCreateInfo & operator=( VkBufferOpaqueCaptureAddressCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferOpaqueCaptureAddressCreateInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 BufferOpaqueCaptureAddressCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferOpaqueCaptureAddressCreateInfo & setOpaqueCaptureAddress( uint64_t opaqueCaptureAddress_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
opaqueCaptureAddress = opaqueCaptureAddress_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkBufferOpaqueCaptureAddressCreateInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkBufferOpaqueCaptureAddressCreateInfo *>( this );
|
|
}
|
|
|
|
operator VkBufferOpaqueCaptureAddressCreateInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkBufferOpaqueCaptureAddressCreateInfo *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint64_t const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, opaqueCaptureAddress );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( BufferOpaqueCaptureAddressCreateInfo const & ) const = default;
|
|
#else
|
|
bool operator==( BufferOpaqueCaptureAddressCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( opaqueCaptureAddress == rhs.opaqueCaptureAddress );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( BufferOpaqueCaptureAddressCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferOpaqueCaptureAddressCreateInfo;
|
|
const void * pNext = {};
|
|
uint64_t opaqueCaptureAddress = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eBufferOpaqueCaptureAddressCreateInfo>
|
|
{
|
|
using Type = BufferOpaqueCaptureAddressCreateInfo;
|
|
};
|
|
|
|
using BufferOpaqueCaptureAddressCreateInfoKHR = BufferOpaqueCaptureAddressCreateInfo;
|
|
|
|
struct BufferViewCreateInfo
|
|
{
|
|
using NativeType = VkBufferViewCreateInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferViewCreateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR BufferViewCreateInfo( VULKAN_HPP_NAMESPACE::BufferViewCreateFlags flags_ = {},
|
|
VULKAN_HPP_NAMESPACE::Buffer buffer_ = {},
|
|
VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
|
|
VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {},
|
|
VULKAN_HPP_NAMESPACE::DeviceSize range_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, flags( flags_ )
|
|
, buffer( buffer_ )
|
|
, format( format_ )
|
|
, offset( offset_ )
|
|
, range( range_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR BufferViewCreateInfo( BufferViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
BufferViewCreateInfo( VkBufferViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: BufferViewCreateInfo( *reinterpret_cast<BufferViewCreateInfo const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
BufferViewCreateInfo & operator=( BufferViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
BufferViewCreateInfo & operator=( VkBufferViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferViewCreateInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 BufferViewCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferViewCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::BufferViewCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferViewCreateInfo & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
buffer = buffer_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferViewCreateInfo & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
format = format_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferViewCreateInfo & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
offset = offset_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferViewCreateInfo & setRange( VULKAN_HPP_NAMESPACE::DeviceSize range_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
range = range_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkBufferViewCreateInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkBufferViewCreateInfo *>( this );
|
|
}
|
|
|
|
operator VkBufferViewCreateInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkBufferViewCreateInfo *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
const void * const &,
|
|
VULKAN_HPP_NAMESPACE::BufferViewCreateFlags const &,
|
|
VULKAN_HPP_NAMESPACE::Buffer const &,
|
|
VULKAN_HPP_NAMESPACE::Format const &,
|
|
VULKAN_HPP_NAMESPACE::DeviceSize const &,
|
|
VULKAN_HPP_NAMESPACE::DeviceSize const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags, buffer, format, offset, range );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( BufferViewCreateInfo const & ) const = default;
|
|
#else
|
|
bool operator==( BufferViewCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( buffer == rhs.buffer ) && ( format == rhs.format ) &&
|
|
( offset == rhs.offset ) && ( range == rhs.range );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( BufferViewCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferViewCreateInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::BufferViewCreateFlags flags = {};
|
|
VULKAN_HPP_NAMESPACE::Buffer buffer = {};
|
|
VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
|
|
VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize range = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eBufferViewCreateInfo>
|
|
{
|
|
using Type = BufferViewCreateInfo;
|
|
};
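
  // A minimal usage sketch, assuming a valid VULKAN_HPP_NAMESPACE::Device `device` and a texel buffer `buffer`;
  // the format, offset, and range values are placeholders.
  //
  //   auto viewCreateInfo = VULKAN_HPP_NAMESPACE::BufferViewCreateInfo{}
  //                           .setBuffer( buffer )
  //                           .setFormat( VULKAN_HPP_NAMESPACE::Format::eR32Sfloat )
  //                           .setOffset( 0 )
  //                           .setRange( VK_WHOLE_SIZE );
  //   VULKAN_HPP_NAMESPACE::BufferView bufferView = device.createBufferView( viewCreateInfo );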
|
|
|
|
struct CheckpointData2NV
|
|
{
|
|
using NativeType = VkCheckpointData2NV;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCheckpointData2NV;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR
|
|
CheckpointData2NV( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage_ = {}, void * pCheckpointMarker_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, stage( stage_ )
|
|
, pCheckpointMarker( pCheckpointMarker_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR CheckpointData2NV( CheckpointData2NV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
CheckpointData2NV( VkCheckpointData2NV const & rhs ) VULKAN_HPP_NOEXCEPT : CheckpointData2NV( *reinterpret_cast<CheckpointData2NV const *>( &rhs ) ) {}
|
|
|
|
CheckpointData2NV & operator=( CheckpointData2NV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
CheckpointData2NV & operator=( VkCheckpointData2NV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CheckpointData2NV const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
operator VkCheckpointData2NV const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkCheckpointData2NV *>( this );
|
|
}
|
|
|
|
operator VkCheckpointData2NV &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkCheckpointData2NV *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::PipelineStageFlags2 const &, void * const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, stage, pCheckpointMarker );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( CheckpointData2NV const & ) const = default;
|
|
#else
|
|
bool operator==( CheckpointData2NV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stage == rhs.stage ) && ( pCheckpointMarker == rhs.pCheckpointMarker );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( CheckpointData2NV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCheckpointData2NV;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage = {};
|
|
void * pCheckpointMarker = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eCheckpointData2NV>
|
|
{
|
|
using Type = CheckpointData2NV;
|
|
};
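
  // A minimal usage sketch, assuming a VULKAN_HPP_NAMESPACE::Queue `queue` and VK_NV_device_diagnostic_checkpoints
  // enabled; CheckpointData2NV is an output structure, so it is normally obtained from a query rather than built by hand.
  //
  //   std::vector<VULKAN_HPP_NAMESPACE::CheckpointData2NV> checkpoints = queue.getCheckpointData2NV();
  //   for ( auto const & checkpoint : checkpoints )
  //   {
  //     // checkpoint.stage and checkpoint.pCheckpointMarker describe the last checkpoint reached at that pipeline stage
  //   }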
|
|
|
|
union ClearColorValue
|
|
{
|
|
using NativeType = VkClearColorValue;
|
|
#if !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS )
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ClearColorValue( const std::array<float, 4> & float32_ = {} ) : float32( float32_ ) {}
|
|
|
|
VULKAN_HPP_CONSTEXPR ClearColorValue( float float32_0, float float32_1, float float32_2, float float32_3 )
|
|
: float32{ { { float32_0, float32_1, float32_2, float32_3 } } }
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ClearColorValue( const std::array<int32_t, 4> & int32_ ) : int32( int32_ ) {}
|
|
|
|
VULKAN_HPP_CONSTEXPR ClearColorValue( int32_t int32_0, int32_t int32_1, int32_t int32_2, int32_t int32_3 )
|
|
: int32{ { { int32_0, int32_1, int32_2, int32_3 } } }
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ClearColorValue( const std::array<uint32_t, 4> & uint32_ ) : uint32( uint32_ ) {}
|
|
|
|
VULKAN_HPP_CONSTEXPR ClearColorValue( uint32_t uint32_0, uint32_t uint32_1, uint32_t uint32_2, uint32_t uint32_3 )
|
|
: uint32{ { { uint32_0, uint32_1, uint32_2, uint32_3 } } }
|
|
{
|
|
}
|
|
#endif /*VULKAN_HPP_NO_UNION_CONSTRUCTORS*/
|
|
|
|
#if !defined( VULKAN_HPP_NO_UNION_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ClearColorValue & setFloat32( std::array<float, 4> float32_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
float32 = float32_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ClearColorValue & setInt32( std::array<int32_t, 4> int32_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
int32 = int32_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ClearColorValue & setUint32( std::array<uint32_t, 4> uint32_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
uint32 = uint32_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_UNION_SETTERS*/
|
|
|
|
operator VkClearColorValue const &() const
|
|
{
|
|
return *reinterpret_cast<const VkClearColorValue *>( this );
|
|
}
|
|
|
|
operator VkClearColorValue &()
|
|
{
|
|
return *reinterpret_cast<VkClearColorValue *>( this );
|
|
}
|
|
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 4> float32;
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<int32_t, 4> int32;
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 4> uint32;
|
|
};
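
  // A minimal usage sketch: exactly one of the three views is written, chosen to match the numeric format of the
  // image being cleared.
  //
  //   VULKAN_HPP_NAMESPACE::ClearColorValue opaqueBlack( 0.0f, 0.0f, 0.0f, 1.0f );   // float color formats
  //   VULKAN_HPP_NAMESPACE::ClearColorValue zeroUint( 0u, 0u, 0u, 0u );              // unsigned integer color formats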
|
|
|
|
struct ClearDepthStencilValue
|
|
{
|
|
using NativeType = VkClearDepthStencilValue;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ClearDepthStencilValue( float depth_ = {}, uint32_t stencil_ = {} ) VULKAN_HPP_NOEXCEPT
|
|
: depth( depth_ )
|
|
, stencil( stencil_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR ClearDepthStencilValue( ClearDepthStencilValue const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ClearDepthStencilValue( VkClearDepthStencilValue const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ClearDepthStencilValue( *reinterpret_cast<ClearDepthStencilValue const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
ClearDepthStencilValue & operator=( ClearDepthStencilValue const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ClearDepthStencilValue & operator=( VkClearDepthStencilValue const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ClearDepthStencilValue const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ClearDepthStencilValue & setDepth( float depth_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
depth = depth_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ClearDepthStencilValue & setStencil( uint32_t stencil_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
stencil = stencil_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkClearDepthStencilValue const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkClearDepthStencilValue *>( this );
|
|
}
|
|
|
|
operator VkClearDepthStencilValue &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkClearDepthStencilValue *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<float const &, uint32_t const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( depth, stencil );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( ClearDepthStencilValue const & ) const = default;
|
|
#else
|
|
bool operator==( ClearDepthStencilValue const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( depth == rhs.depth ) && ( stencil == rhs.stencil );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( ClearDepthStencilValue const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
float depth = {};
|
|
uint32_t stencil = {};
|
|
};
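
  // A minimal usage sketch: a typical depth-stencil clear uses depth = 1.0 (far plane) and stencil = 0.
  //
  //   VULKAN_HPP_NAMESPACE::ClearDepthStencilValue depthClear( 1.0f, 0 );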
|
|
|
|
union ClearValue
|
|
{
|
|
using NativeType = VkClearValue;
|
|
#if !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS )
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ClearValue( VULKAN_HPP_NAMESPACE::ClearColorValue color_ = {} ) : color( color_ ) {}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ClearValue( VULKAN_HPP_NAMESPACE::ClearDepthStencilValue depthStencil_ ) : depthStencil( depthStencil_ ) {}
|
|
#endif /*VULKAN_HPP_NO_UNION_CONSTRUCTORS*/
|
|
|
|
#if !defined( VULKAN_HPP_NO_UNION_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ClearValue & setColor( VULKAN_HPP_NAMESPACE::ClearColorValue const & color_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
color = color_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ClearValue & setDepthStencil( VULKAN_HPP_NAMESPACE::ClearDepthStencilValue const & depthStencil_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
depthStencil = depthStencil_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_UNION_SETTERS*/
|
|
|
|
operator VkClearValue const &() const
|
|
{
|
|
return *reinterpret_cast<const VkClearValue *>( this );
|
|
}
|
|
|
|
operator VkClearValue &()
|
|
{
|
|
return *reinterpret_cast<VkClearValue *>( this );
|
|
}
|
|
|
|
#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
|
|
VULKAN_HPP_NAMESPACE::ClearColorValue color;
|
|
VULKAN_HPP_NAMESPACE::ClearDepthStencilValue depthStencil;
|
|
#else
|
|
VkClearColorValue color;
|
|
VkClearDepthStencilValue depthStencil;
|
|
#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/
|
|
};
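
  // A minimal usage sketch: one ClearValue per attachment, written through the member that matches the attachment.
  //
  //   std::array<VULKAN_HPP_NAMESPACE::ClearValue, 2> clearValues{};
  //   clearValues[0].color        = VULKAN_HPP_NAMESPACE::ClearColorValue( 0.0f, 0.0f, 0.0f, 1.0f );
  //   clearValues[1].depthStencil = VULKAN_HPP_NAMESPACE::ClearDepthStencilValue( 1.0f, 0 );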
|
|
|
|
struct ClearAttachment
|
|
{
|
|
using NativeType = VkClearAttachment;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR_14 ClearAttachment( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {},
|
|
uint32_t colorAttachment_ = {},
|
|
VULKAN_HPP_NAMESPACE::ClearValue clearValue_ = {} ) VULKAN_HPP_NOEXCEPT
|
|
: aspectMask( aspectMask_ )
|
|
, colorAttachment( colorAttachment_ )
|
|
, clearValue( clearValue_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ClearAttachment( ClearAttachment const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ClearAttachment( VkClearAttachment const & rhs ) VULKAN_HPP_NOEXCEPT : ClearAttachment( *reinterpret_cast<ClearAttachment const *>( &rhs ) ) {}
|
|
|
|
ClearAttachment & operator=( ClearAttachment const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ClearAttachment & operator=( VkClearAttachment const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ClearAttachment const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ClearAttachment & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
aspectMask = aspectMask_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ClearAttachment & setColorAttachment( uint32_t colorAttachment_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
colorAttachment = colorAttachment_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ClearAttachment & setClearValue( VULKAN_HPP_NAMESPACE::ClearValue const & clearValue_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
clearValue = clearValue_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkClearAttachment const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkClearAttachment *>( this );
|
|
}
|
|
|
|
operator VkClearAttachment &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkClearAttachment *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::ImageAspectFlags const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ClearValue const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( aspectMask, colorAttachment, clearValue );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {};
|
|
uint32_t colorAttachment = {};
|
|
VULKAN_HPP_NAMESPACE::ClearValue clearValue = {};
|
|
};
|
|
|
|
struct ClearRect
|
|
{
|
|
using NativeType = VkClearRect;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ClearRect( VULKAN_HPP_NAMESPACE::Rect2D rect_ = {}, uint32_t baseArrayLayer_ = {}, uint32_t layerCount_ = {} ) VULKAN_HPP_NOEXCEPT
|
|
: rect( rect_ )
|
|
, baseArrayLayer( baseArrayLayer_ )
|
|
, layerCount( layerCount_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR ClearRect( ClearRect const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ClearRect( VkClearRect const & rhs ) VULKAN_HPP_NOEXCEPT : ClearRect( *reinterpret_cast<ClearRect const *>( &rhs ) ) {}
|
|
|
|
ClearRect & operator=( ClearRect const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ClearRect & operator=( VkClearRect const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ClearRect const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ClearRect & setRect( VULKAN_HPP_NAMESPACE::Rect2D const & rect_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
rect = rect_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ClearRect & setBaseArrayLayer( uint32_t baseArrayLayer_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
baseArrayLayer = baseArrayLayer_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ClearRect & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
layerCount = layerCount_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkClearRect const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkClearRect *>( this );
|
|
}
|
|
|
|
operator VkClearRect &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkClearRect *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::Rect2D const &, uint32_t const &, uint32_t const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( rect, baseArrayLayer, layerCount );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( ClearRect const & ) const = default;
|
|
#else
|
|
bool operator==( ClearRect const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( rect == rhs.rect ) && ( baseArrayLayer == rhs.baseArrayLayer ) && ( layerCount == rhs.layerCount );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( ClearRect const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::Rect2D rect = {};
|
|
uint32_t baseArrayLayer = {};
|
|
uint32_t layerCount = {};
|
|
};
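
  // A minimal usage sketch, assuming a VULKAN_HPP_NAMESPACE::CommandBuffer `commandBuffer` recording inside a render
  // pass and a framebuffer extent `extent`; ClearAttachment and ClearRect are consumed together by clearAttachments.
  //
  //   VULKAN_HPP_NAMESPACE::ClearAttachment clearAttachment( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor,
  //                                                          0,
  //                                                          VULKAN_HPP_NAMESPACE::ClearColorValue( 0.0f, 0.0f, 0.0f, 1.0f ) );
  //   VULKAN_HPP_NAMESPACE::ClearRect       clearRect( VULKAN_HPP_NAMESPACE::Rect2D( { 0, 0 }, extent ), 0, 1 );
  //   commandBuffer.clearAttachments( clearAttachment, clearRect );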
|
|
|
|
struct CommandBufferAllocateInfo
|
|
{
|
|
using NativeType = VkCommandBufferAllocateInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferAllocateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR CommandBufferAllocateInfo( VULKAN_HPP_NAMESPACE::CommandPool commandPool_ = {},
|
|
VULKAN_HPP_NAMESPACE::CommandBufferLevel level_ = VULKAN_HPP_NAMESPACE::CommandBufferLevel::ePrimary,
|
|
uint32_t commandBufferCount_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, commandPool( commandPool_ )
|
|
, level( level_ )
|
|
, commandBufferCount( commandBufferCount_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR CommandBufferAllocateInfo( CommandBufferAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
CommandBufferAllocateInfo( VkCommandBufferAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: CommandBufferAllocateInfo( *reinterpret_cast<CommandBufferAllocateInfo const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
CommandBufferAllocateInfo & operator=( CommandBufferAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
CommandBufferAllocateInfo & operator=( VkCommandBufferAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 CommandBufferAllocateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CommandBufferAllocateInfo & setCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
commandPool = commandPool_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CommandBufferAllocateInfo & setLevel( VULKAN_HPP_NAMESPACE::CommandBufferLevel level_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
level = level_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CommandBufferAllocateInfo & setCommandBufferCount( uint32_t commandBufferCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
commandBufferCount = commandBufferCount_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkCommandBufferAllocateInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkCommandBufferAllocateInfo *>( this );
|
|
}
|
|
|
|
operator VkCommandBufferAllocateInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkCommandBufferAllocateInfo *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
const void * const &,
|
|
VULKAN_HPP_NAMESPACE::CommandPool const &,
|
|
VULKAN_HPP_NAMESPACE::CommandBufferLevel const &,
|
|
uint32_t const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, commandPool, level, commandBufferCount );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( CommandBufferAllocateInfo const & ) const = default;
|
|
#else
|
|
bool operator==( CommandBufferAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( commandPool == rhs.commandPool ) && ( level == rhs.level ) &&
|
|
( commandBufferCount == rhs.commandBufferCount );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( CommandBufferAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferAllocateInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::CommandPool commandPool = {};
|
|
VULKAN_HPP_NAMESPACE::CommandBufferLevel level = VULKAN_HPP_NAMESPACE::CommandBufferLevel::ePrimary;
|
|
uint32_t commandBufferCount = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eCommandBufferAllocateInfo>
|
|
{
|
|
using Type = CommandBufferAllocateInfo;
|
|
};
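
  // A minimal usage sketch, assuming a valid `device` and `commandPool`:
  //
  //   VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo allocateInfo( commandPool, VULKAN_HPP_NAMESPACE::CommandBufferLevel::ePrimary, 1 );
  //   std::vector<VULKAN_HPP_NAMESPACE::CommandBuffer> commandBuffers = device.allocateCommandBuffers( allocateInfo );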
|
|
|
|
struct CommandBufferInheritanceInfo
|
|
{
|
|
using NativeType = VkCommandBufferInheritanceInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferInheritanceInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR CommandBufferInheritanceInfo( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {},
|
|
uint32_t subpass_ = {},
|
|
VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryEnable_ = {},
|
|
VULKAN_HPP_NAMESPACE::QueryControlFlags queryFlags_ = {},
|
|
VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, renderPass( renderPass_ )
|
|
, subpass( subpass_ )
|
|
, framebuffer( framebuffer_ )
|
|
, occlusionQueryEnable( occlusionQueryEnable_ )
|
|
, queryFlags( queryFlags_ )
|
|
, pipelineStatistics( pipelineStatistics_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR CommandBufferInheritanceInfo( CommandBufferInheritanceInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
CommandBufferInheritanceInfo( VkCommandBufferInheritanceInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: CommandBufferInheritanceInfo( *reinterpret_cast<CommandBufferInheritanceInfo const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
CommandBufferInheritanceInfo & operator=( CommandBufferInheritanceInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
CommandBufferInheritanceInfo & operator=( VkCommandBufferInheritanceInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo & setRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
renderPass = renderPass_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo & setSubpass( uint32_t subpass_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
subpass = subpass_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo & setFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
framebuffer = framebuffer_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo & setOcclusionQueryEnable( VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryEnable_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
occlusionQueryEnable = occlusionQueryEnable_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo & setQueryFlags( VULKAN_HPP_NAMESPACE::QueryControlFlags queryFlags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
queryFlags = queryFlags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo &
|
|
setPipelineStatistics( VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pipelineStatistics = pipelineStatistics_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkCommandBufferInheritanceInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkCommandBufferInheritanceInfo *>( this );
|
|
}
|
|
|
|
operator VkCommandBufferInheritanceInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkCommandBufferInheritanceInfo *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
const void * const &,
|
|
VULKAN_HPP_NAMESPACE::RenderPass const &,
|
|
uint32_t const &,
|
|
VULKAN_HPP_NAMESPACE::Framebuffer const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::QueryControlFlags const &,
|
|
VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, renderPass, subpass, framebuffer, occlusionQueryEnable, queryFlags, pipelineStatistics );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( CommandBufferInheritanceInfo const & ) const = default;
|
|
#else
|
|
bool operator==( CommandBufferInheritanceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( renderPass == rhs.renderPass ) && ( subpass == rhs.subpass ) &&
|
|
( framebuffer == rhs.framebuffer ) && ( occlusionQueryEnable == rhs.occlusionQueryEnable ) && ( queryFlags == rhs.queryFlags ) &&
|
|
( pipelineStatistics == rhs.pipelineStatistics );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( CommandBufferInheritanceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferInheritanceInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::RenderPass renderPass = {};
|
|
uint32_t subpass = {};
|
|
VULKAN_HPP_NAMESPACE::Framebuffer framebuffer = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryEnable = {};
|
|
VULKAN_HPP_NAMESPACE::QueryControlFlags queryFlags = {};
|
|
VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eCommandBufferInheritanceInfo>
|
|
{
|
|
using Type = CommandBufferInheritanceInfo;
|
|
};
|
|
|
|
struct CommandBufferBeginInfo
|
|
{
|
|
using NativeType = VkCommandBufferBeginInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferBeginInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR CommandBufferBeginInfo( VULKAN_HPP_NAMESPACE::CommandBufferUsageFlags flags_ = {},
|
|
const VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo * pInheritanceInfo_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, flags( flags_ )
|
|
, pInheritanceInfo( pInheritanceInfo_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR CommandBufferBeginInfo( CommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
CommandBufferBeginInfo( VkCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: CommandBufferBeginInfo( *reinterpret_cast<CommandBufferBeginInfo const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
CommandBufferBeginInfo & operator=( CommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
CommandBufferBeginInfo & operator=( VkCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 CommandBufferBeginInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CommandBufferBeginInfo & setFlags( VULKAN_HPP_NAMESPACE::CommandBufferUsageFlags flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CommandBufferBeginInfo &
|
|
setPInheritanceInfo( const VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo * pInheritanceInfo_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pInheritanceInfo = pInheritanceInfo_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkCommandBufferBeginInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkCommandBufferBeginInfo *>( this );
|
|
}
|
|
|
|
operator VkCommandBufferBeginInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkCommandBufferBeginInfo *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
const void * const &,
|
|
VULKAN_HPP_NAMESPACE::CommandBufferUsageFlags const &,
|
|
const VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo * const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags, pInheritanceInfo );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( CommandBufferBeginInfo const & ) const = default;
|
|
#else
|
|
bool operator==( CommandBufferBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( pInheritanceInfo == rhs.pInheritanceInfo );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( CommandBufferBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferBeginInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::CommandBufferUsageFlags flags = {};
|
|
const VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo * pInheritanceInfo = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eCommandBufferBeginInfo>
|
|
{
|
|
using Type = CommandBufferBeginInfo;
|
|
};
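
  // A minimal usage sketch, assuming a primary command buffer `commandBuffer` that is re-recorded every frame;
  // pInheritanceInfo is only consulted for secondary command buffers and may stay null here.
  //
  //   VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo beginInfo( VULKAN_HPP_NAMESPACE::CommandBufferUsageFlagBits::eOneTimeSubmit );
  //   commandBuffer.begin( beginInfo );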
|
|
|
|
struct CommandBufferInheritanceRenderingInfo
|
|
{
|
|
using NativeType = VkCommandBufferInheritanceRenderingInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferInheritanceRenderingInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR
|
|
CommandBufferInheritanceRenderingInfo( VULKAN_HPP_NAMESPACE::RenderingFlags flags_ = {},
|
|
uint32_t viewMask_ = {},
|
|
uint32_t colorAttachmentCount_ = {},
|
|
const VULKAN_HPP_NAMESPACE::Format * pColorAttachmentFormats_ = {},
|
|
VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
|
|
VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
|
|
VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1,
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, flags( flags_ )
|
|
, viewMask( viewMask_ )
|
|
, colorAttachmentCount( colorAttachmentCount_ )
|
|
, pColorAttachmentFormats( pColorAttachmentFormats_ )
|
|
, depthAttachmentFormat( depthAttachmentFormat_ )
|
|
, stencilAttachmentFormat( stencilAttachmentFormat_ )
|
|
, rasterizationSamples( rasterizationSamples_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR CommandBufferInheritanceRenderingInfo( CommandBufferInheritanceRenderingInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
CommandBufferInheritanceRenderingInfo( VkCommandBufferInheritanceRenderingInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: CommandBufferInheritanceRenderingInfo( *reinterpret_cast<CommandBufferInheritanceRenderingInfo const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
CommandBufferInheritanceRenderingInfo( VULKAN_HPP_NAMESPACE::RenderingFlags flags_,
|
|
uint32_t viewMask_,
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Format> const & colorAttachmentFormats_,
|
|
VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
|
|
VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
|
|
VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1,
|
|
const void * pNext_ = nullptr )
|
|
: pNext( pNext_ )
|
|
, flags( flags_ )
|
|
, viewMask( viewMask_ )
|
|
, colorAttachmentCount( static_cast<uint32_t>( colorAttachmentFormats_.size() ) )
|
|
, pColorAttachmentFormats( colorAttachmentFormats_.data() )
|
|
, depthAttachmentFormat( depthAttachmentFormat_ )
|
|
, stencilAttachmentFormat( stencilAttachmentFormat_ )
|
|
, rasterizationSamples( rasterizationSamples_ )
|
|
{
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
CommandBufferInheritanceRenderingInfo & operator=( CommandBufferInheritanceRenderingInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
CommandBufferInheritanceRenderingInfo & operator=( VkCommandBufferInheritanceRenderingInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceRenderingInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo & setFlags( VULKAN_HPP_NAMESPACE::RenderingFlags flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo & setViewMask( uint32_t viewMask_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
viewMask = viewMask_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
colorAttachmentCount = colorAttachmentCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo &
|
|
setPColorAttachmentFormats( const VULKAN_HPP_NAMESPACE::Format * pColorAttachmentFormats_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pColorAttachmentFormats = pColorAttachmentFormats_;
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
CommandBufferInheritanceRenderingInfo & setColorAttachmentFormats(
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Format> const & colorAttachmentFormats_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
colorAttachmentCount = static_cast<uint32_t>( colorAttachmentFormats_.size() );
|
|
pColorAttachmentFormats = colorAttachmentFormats_.data();
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo &
|
|
setDepthAttachmentFormat( VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
depthAttachmentFormat = depthAttachmentFormat_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo &
|
|
setStencilAttachmentFormat( VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
stencilAttachmentFormat = stencilAttachmentFormat_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo &
|
|
setRasterizationSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
rasterizationSamples = rasterizationSamples_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkCommandBufferInheritanceRenderingInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkCommandBufferInheritanceRenderingInfo *>( this );
|
|
}
|
|
|
|
operator VkCommandBufferInheritanceRenderingInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkCommandBufferInheritanceRenderingInfo *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
const void * const &,
|
|
VULKAN_HPP_NAMESPACE::RenderingFlags const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
const VULKAN_HPP_NAMESPACE::Format * const &,
|
|
VULKAN_HPP_NAMESPACE::Format const &,
|
|
VULKAN_HPP_NAMESPACE::Format const &,
|
|
VULKAN_HPP_NAMESPACE::SampleCountFlagBits const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie(
|
|
sType, pNext, flags, viewMask, colorAttachmentCount, pColorAttachmentFormats, depthAttachmentFormat, stencilAttachmentFormat, rasterizationSamples );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( CommandBufferInheritanceRenderingInfo const & ) const = default;
|
|
#else
|
|
bool operator==( CommandBufferInheritanceRenderingInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( viewMask == rhs.viewMask ) &&
|
|
( colorAttachmentCount == rhs.colorAttachmentCount ) && ( pColorAttachmentFormats == rhs.pColorAttachmentFormats ) &&
|
|
( depthAttachmentFormat == rhs.depthAttachmentFormat ) && ( stencilAttachmentFormat == rhs.stencilAttachmentFormat ) &&
|
|
( rasterizationSamples == rhs.rasterizationSamples );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( CommandBufferInheritanceRenderingInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferInheritanceRenderingInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::RenderingFlags flags = {};
|
|
uint32_t viewMask = {};
|
|
uint32_t colorAttachmentCount = {};
|
|
const VULKAN_HPP_NAMESPACE::Format * pColorAttachmentFormats = {};
|
|
VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
|
|
VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
|
|
VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eCommandBufferInheritanceRenderingInfo>
|
|
{
|
|
using Type = CommandBufferInheritanceRenderingInfo;
|
|
};
|
|
|
|
using CommandBufferInheritanceRenderingInfoKHR = CommandBufferInheritanceRenderingInfo;
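
  // A minimal usage sketch, assuming dynamic rendering and a secondary command buffer whose single color attachment
  // uses the lvalue `colorFormat`; the structure is chained behind CommandBufferInheritanceInfo when the secondary
  // command buffer is begun.
  //
  //   VULKAN_HPP_NAMESPACE::CommandBufferInheritanceRenderingInfo inheritanceRenderingInfo;
  //   inheritanceRenderingInfo.setColorAttachmentFormats( colorFormat );
  //   inheritanceRenderingInfo.setDepthAttachmentFormat( VULKAN_HPP_NAMESPACE::Format::eD32Sfloat );
  //   VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo inheritanceInfo;
  //   inheritanceInfo.setPNext( &inheritanceRenderingInfo );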
|
|
|
|
struct CommandBufferSubmitInfo
|
|
{
|
|
using NativeType = VkCommandBufferSubmitInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferSubmitInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR CommandBufferSubmitInfo( VULKAN_HPP_NAMESPACE::CommandBuffer commandBuffer_ = {},
|
|
uint32_t deviceMask_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, commandBuffer( commandBuffer_ )
|
|
, deviceMask( deviceMask_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR CommandBufferSubmitInfo( CommandBufferSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
CommandBufferSubmitInfo( VkCommandBufferSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: CommandBufferSubmitInfo( *reinterpret_cast<CommandBufferSubmitInfo const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
CommandBufferSubmitInfo & operator=( CommandBufferSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
CommandBufferSubmitInfo & operator=( VkCommandBufferSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 CommandBufferSubmitInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CommandBufferSubmitInfo & setCommandBuffer( VULKAN_HPP_NAMESPACE::CommandBuffer commandBuffer_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
commandBuffer = commandBuffer_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CommandBufferSubmitInfo & setDeviceMask( uint32_t deviceMask_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
deviceMask = deviceMask_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkCommandBufferSubmitInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkCommandBufferSubmitInfo *>( this );
|
|
}
|
|
|
|
operator VkCommandBufferSubmitInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkCommandBufferSubmitInfo *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::CommandBuffer const &, uint32_t const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, commandBuffer, deviceMask );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( CommandBufferSubmitInfo const & ) const = default;
|
|
#else
|
|
bool operator==( CommandBufferSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( commandBuffer == rhs.commandBuffer ) && ( deviceMask == rhs.deviceMask );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( CommandBufferSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferSubmitInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::CommandBuffer commandBuffer = {};
|
|
uint32_t deviceMask = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eCommandBufferSubmitInfo>
|
|
{
|
|
using Type = CommandBufferSubmitInfo;
|
|
};
|
|
|
|
using CommandBufferSubmitInfoKHR = CommandBufferSubmitInfo;
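
  // A minimal usage sketch, assuming Vulkan 1.3 or VK_KHR_synchronization2 and a fully recorded `commandBuffer`;
  // the structure simply wraps the command buffer handle for a submit2 call on `queue`.
  //
  //   VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo commandBufferSubmitInfo( commandBuffer );
  //   VULKAN_HPP_NAMESPACE::SubmitInfo2             submitInfo2;
  //   submitInfo2.setCommandBufferInfos( commandBufferSubmitInfo );
  //   queue.submit2( submitInfo2 );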
|
|
|
|
struct CommandPoolCreateInfo
|
|
{
|
|
using NativeType = VkCommandPoolCreateInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandPoolCreateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR CommandPoolCreateInfo( VULKAN_HPP_NAMESPACE::CommandPoolCreateFlags flags_ = {},
|
|
uint32_t queueFamilyIndex_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, flags( flags_ )
|
|
, queueFamilyIndex( queueFamilyIndex_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR CommandPoolCreateInfo( CommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
CommandPoolCreateInfo( VkCommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: CommandPoolCreateInfo( *reinterpret_cast<CommandPoolCreateInfo const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
CommandPoolCreateInfo & operator=( CommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
CommandPoolCreateInfo & operator=( VkCommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 CommandPoolCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CommandPoolCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::CommandPoolCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CommandPoolCreateInfo & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
queueFamilyIndex = queueFamilyIndex_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkCommandPoolCreateInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkCommandPoolCreateInfo *>( this );
|
|
}
|
|
|
|
operator VkCommandPoolCreateInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkCommandPoolCreateInfo *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::CommandPoolCreateFlags const &, uint32_t const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags, queueFamilyIndex );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( CommandPoolCreateInfo const & ) const = default;
|
|
#else
|
|
bool operator==( CommandPoolCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( queueFamilyIndex == rhs.queueFamilyIndex );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( CommandPoolCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandPoolCreateInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::CommandPoolCreateFlags flags = {};
|
|
uint32_t queueFamilyIndex = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eCommandPoolCreateInfo>
|
|
{
|
|
using Type = CommandPoolCreateInfo;
|
|
};
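
  // A minimal usage sketch, assuming a valid `device` and a queue family index `graphicsQueueFamilyIndex`:
  //
  //   VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo poolCreateInfo( VULKAN_HPP_NAMESPACE::CommandPoolCreateFlagBits::eResetCommandBuffer,
  //                                                               graphicsQueueFamilyIndex );
  //   VULKAN_HPP_NAMESPACE::CommandPool commandPool = device.createCommandPool( poolCreateInfo );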
|
|
|
|
struct CommandPoolMemoryConsumption
|
|
{
|
|
using NativeType = VkCommandPoolMemoryConsumption;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandPoolMemoryConsumption;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR CommandPoolMemoryConsumption( VULKAN_HPP_NAMESPACE::DeviceSize commandPoolAllocated_ = {},
|
|
VULKAN_HPP_NAMESPACE::DeviceSize commandPoolReservedSize_ = {},
|
|
VULKAN_HPP_NAMESPACE::DeviceSize commandBufferAllocated_ = {},
|
|
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, commandPoolAllocated( commandPoolAllocated_ )
|
|
, commandPoolReservedSize( commandPoolReservedSize_ )
|
|
, commandBufferAllocated( commandBufferAllocated_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR CommandPoolMemoryConsumption( CommandPoolMemoryConsumption const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
CommandPoolMemoryConsumption( VkCommandPoolMemoryConsumption const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: CommandPoolMemoryConsumption( *reinterpret_cast<CommandPoolMemoryConsumption const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
CommandPoolMemoryConsumption & operator=( CommandPoolMemoryConsumption const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
CommandPoolMemoryConsumption & operator=( VkCommandPoolMemoryConsumption const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandPoolMemoryConsumption const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
operator VkCommandPoolMemoryConsumption const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkCommandPoolMemoryConsumption *>( this );
|
|
}
|
|
|
|
operator VkCommandPoolMemoryConsumption &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkCommandPoolMemoryConsumption *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
void * const &,
|
|
VULKAN_HPP_NAMESPACE::DeviceSize const &,
|
|
VULKAN_HPP_NAMESPACE::DeviceSize const &,
|
|
VULKAN_HPP_NAMESPACE::DeviceSize const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, commandPoolAllocated, commandPoolReservedSize, commandBufferAllocated );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( CommandPoolMemoryConsumption const & ) const = default;
|
|
#else
|
|
bool operator==( CommandPoolMemoryConsumption const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( commandPoolAllocated == rhs.commandPoolAllocated ) &&
|
|
( commandPoolReservedSize == rhs.commandPoolReservedSize ) && ( commandBufferAllocated == rhs.commandBufferAllocated );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( CommandPoolMemoryConsumption const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandPoolMemoryConsumption;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize commandPoolAllocated = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize commandPoolReservedSize = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize commandBufferAllocated = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eCommandPoolMemoryConsumption>
|
|
{
|
|
using Type = CommandPoolMemoryConsumption;
|
|
};
|
|
|
|
struct CommandPoolMemoryReservationCreateInfo
|
|
{
|
|
using NativeType = VkCommandPoolMemoryReservationCreateInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandPoolMemoryReservationCreateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR CommandPoolMemoryReservationCreateInfo( VULKAN_HPP_NAMESPACE::DeviceSize commandPoolReservedSize_ = {},
|
|
uint32_t commandPoolMaxCommandBuffers_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, commandPoolReservedSize( commandPoolReservedSize_ )
|
|
, commandPoolMaxCommandBuffers( commandPoolMaxCommandBuffers_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR CommandPoolMemoryReservationCreateInfo( CommandPoolMemoryReservationCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
    CommandPoolMemoryReservationCreateInfo( VkCommandPoolMemoryReservationCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : CommandPoolMemoryReservationCreateInfo( *reinterpret_cast<CommandPoolMemoryReservationCreateInfo const *>( &rhs ) )
    {
    }

    CommandPoolMemoryReservationCreateInfo & operator=( CommandPoolMemoryReservationCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    CommandPoolMemoryReservationCreateInfo & operator=( VkCommandPoolMemoryReservationCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandPoolMemoryReservationCreateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 CommandPoolMemoryReservationCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CommandPoolMemoryReservationCreateInfo &
      setCommandPoolReservedSize( VULKAN_HPP_NAMESPACE::DeviceSize commandPoolReservedSize_ ) VULKAN_HPP_NOEXCEPT
    {
      commandPoolReservedSize = commandPoolReservedSize_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CommandPoolMemoryReservationCreateInfo &
      setCommandPoolMaxCommandBuffers( uint32_t commandPoolMaxCommandBuffers_ ) VULKAN_HPP_NOEXCEPT
    {
      commandPoolMaxCommandBuffers = commandPoolMaxCommandBuffers_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkCommandPoolMemoryReservationCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkCommandPoolMemoryReservationCreateInfo *>( this );
    }

    operator VkCommandPoolMemoryReservationCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkCommandPoolMemoryReservationCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, uint32_t const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, commandPoolReservedSize, commandPoolMaxCommandBuffers );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( CommandPoolMemoryReservationCreateInfo const & ) const = default;
#else
    bool operator==( CommandPoolMemoryReservationCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
# if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
# else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( commandPoolReservedSize == rhs.commandPoolReservedSize ) &&
             ( commandPoolMaxCommandBuffers == rhs.commandPoolMaxCommandBuffers );
# endif
    }

    bool operator!=( CommandPoolMemoryReservationCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandPoolMemoryReservationCreateInfo;
    const void * pNext = {};
    VULKAN_HPP_NAMESPACE::DeviceSize commandPoolReservedSize = {};
    uint32_t commandPoolMaxCommandBuffers = {};
  };

  template <>
  struct CppType<StructureType, StructureType::eCommandPoolMemoryReservationCreateInfo>
  {
    using Type = CommandPoolMemoryReservationCreateInfo;
  };
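  // Illustrative sketch (not part of the generated header): CommandPoolMemoryReservationCreateInfo is
  // chained into CommandPoolCreateInfo::pNext when creating a command pool on Vulkan SC, where command
  // pool memory is reserved up front. The handles and sizes below (`device`, `queueFamilyIndex`, the
  // byte count) are assumptions for the example only; error handling depends on the configuration.
  //
  //   vk::CommandPoolMemoryReservationCreateInfo reservation{};
  //   reservation.setCommandPoolReservedSize( 1024 * 1024 )   // bytes reserved for recorded commands
  //              .setCommandPoolMaxCommandBuffers( 8 );       // command buffers allocatable from the pool
  //
  //   vk::CommandPoolCreateInfo poolInfo{};
  //   poolInfo.setQueueFamilyIndex( queueFamilyIndex ).setPNext( &reservation );
  //
  //   vk::CommandPool pool = device.createCommandPool( poolInfo );
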
  struct ComponentMapping
  {
    using NativeType = VkComponentMapping;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR ComponentMapping( VULKAN_HPP_NAMESPACE::ComponentSwizzle r_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity,
                                           VULKAN_HPP_NAMESPACE::ComponentSwizzle g_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity,
                                           VULKAN_HPP_NAMESPACE::ComponentSwizzle b_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity,
                                           VULKAN_HPP_NAMESPACE::ComponentSwizzle a_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity ) VULKAN_HPP_NOEXCEPT
      : r( r_ )
      , g( g_ )
      , b( b_ )
      , a( a_ )
    {
    }

    VULKAN_HPP_CONSTEXPR ComponentMapping( ComponentMapping const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ComponentMapping( VkComponentMapping const & rhs ) VULKAN_HPP_NOEXCEPT : ComponentMapping( *reinterpret_cast<ComponentMapping const *>( &rhs ) ) {}

    ComponentMapping & operator=( ComponentMapping const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    ComponentMapping & operator=( VkComponentMapping const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ComponentMapping const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 ComponentMapping & setR( VULKAN_HPP_NAMESPACE::ComponentSwizzle r_ ) VULKAN_HPP_NOEXCEPT
    {
      r = r_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ComponentMapping & setG( VULKAN_HPP_NAMESPACE::ComponentSwizzle g_ ) VULKAN_HPP_NOEXCEPT
    {
      g = g_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ComponentMapping & setB( VULKAN_HPP_NAMESPACE::ComponentSwizzle b_ ) VULKAN_HPP_NOEXCEPT
    {
      b = b_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ComponentMapping & setA( VULKAN_HPP_NAMESPACE::ComponentSwizzle a_ ) VULKAN_HPP_NOEXCEPT
    {
      a = a_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkComponentMapping const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkComponentMapping *>( this );
    }

    operator VkComponentMapping &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkComponentMapping *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::ComponentSwizzle const &,
               VULKAN_HPP_NAMESPACE::ComponentSwizzle const &,
               VULKAN_HPP_NAMESPACE::ComponentSwizzle const &,
               VULKAN_HPP_NAMESPACE::ComponentSwizzle const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( r, g, b, a );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ComponentMapping const & ) const = default;
#else
    bool operator==( ComponentMapping const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
# if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
# else
      return ( r == rhs.r ) && ( g == rhs.g ) && ( b == rhs.b ) && ( a == rhs.a );
# endif
    }

    bool operator!=( ComponentMapping const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::ComponentSwizzle r = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity;
    VULKAN_HPP_NAMESPACE::ComponentSwizzle g = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity;
    VULKAN_HPP_NAMESPACE::ComponentSwizzle b = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity;
    VULKAN_HPP_NAMESPACE::ComponentSwizzle a = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity;
  };
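  // Illustrative sketch (not part of the generated header): ComponentMapping describes the per-channel
  // swizzle of an image view; default construction yields the identity mapping. The example broadcasts
  // a single-channel texture into RGB and forces alpha to one; `viewInfo` is an assumed
  // vk::ImageViewCreateInfo being filled in elsewhere.
  //
  //   vk::ComponentMapping broadcastRed( vk::ComponentSwizzle::eR,
  //                                      vk::ComponentSwizzle::eR,
  //                                      vk::ComponentSwizzle::eR,
  //                                      vk::ComponentSwizzle::eOne );
  //   viewInfo.setComponents( broadcastRed );
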
struct SpecializationMapEntry
|
|
{
|
|
using NativeType = VkSpecializationMapEntry;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR SpecializationMapEntry( uint32_t constantID_ = {}, uint32_t offset_ = {}, size_t size_ = {} ) VULKAN_HPP_NOEXCEPT
|
|
: constantID( constantID_ )
|
|
, offset( offset_ )
|
|
, size( size_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR SpecializationMapEntry( SpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SpecializationMapEntry( VkSpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: SpecializationMapEntry( *reinterpret_cast<SpecializationMapEntry const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
SpecializationMapEntry & operator=( SpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
SpecializationMapEntry & operator=( VkSpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SpecializationMapEntry const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 SpecializationMapEntry & setConstantID( uint32_t constantID_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
constantID = constantID_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SpecializationMapEntry & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
offset = offset_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SpecializationMapEntry & setSize( size_t size_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
size = size_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkSpecializationMapEntry const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkSpecializationMapEntry *>( this );
|
|
}
|
|
|
|
operator VkSpecializationMapEntry &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkSpecializationMapEntry *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<uint32_t const &, uint32_t const &, size_t const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( constantID, offset, size );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( SpecializationMapEntry const & ) const = default;
|
|
#else
|
|
bool operator==( SpecializationMapEntry const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( constantID == rhs.constantID ) && ( offset == rhs.offset ) && ( size == rhs.size );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( SpecializationMapEntry const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
uint32_t constantID = {};
|
|
uint32_t offset = {};
|
|
size_t size = {};
|
|
};
|
|
|
|
struct SpecializationInfo
|
|
{
|
|
using NativeType = VkSpecializationInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR SpecializationInfo( uint32_t mapEntryCount_ = {},
|
|
const VULKAN_HPP_NAMESPACE::SpecializationMapEntry * pMapEntries_ = {},
|
|
size_t dataSize_ = {},
|
|
const void * pData_ = {} ) VULKAN_HPP_NOEXCEPT
|
|
: mapEntryCount( mapEntryCount_ )
|
|
, pMapEntries( pMapEntries_ )
|
|
, dataSize( dataSize_ )
|
|
, pData( pData_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR SpecializationInfo( SpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SpecializationInfo( VkSpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT : SpecializationInfo( *reinterpret_cast<SpecializationInfo const *>( &rhs ) ) {}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
template <typename T>
|
|
SpecializationInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SpecializationMapEntry> const & mapEntries_,
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & data_ = {} )
|
|
: mapEntryCount( static_cast<uint32_t>( mapEntries_.size() ) )
|
|
, pMapEntries( mapEntries_.data() )
|
|
, dataSize( data_.size() * sizeof( T ) )
|
|
, pData( data_.data() )
|
|
{
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
SpecializationInfo & operator=( SpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
SpecializationInfo & operator=( VkSpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SpecializationInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 SpecializationInfo & setMapEntryCount( uint32_t mapEntryCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
mapEntryCount = mapEntryCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SpecializationInfo & setPMapEntries( const VULKAN_HPP_NAMESPACE::SpecializationMapEntry * pMapEntries_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pMapEntries = pMapEntries_;
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
SpecializationInfo &
|
|
setMapEntries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SpecializationMapEntry> const & mapEntries_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
mapEntryCount = static_cast<uint32_t>( mapEntries_.size() );
|
|
pMapEntries = mapEntries_.data();
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SpecializationInfo & setDataSize( size_t dataSize_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dataSize = dataSize_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SpecializationInfo & setPData( const void * pData_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pData = pData_;
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
template <typename T>
|
|
SpecializationInfo & setData( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & data_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dataSize = data_.size() * sizeof( T );
|
|
pData = data_.data();
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkSpecializationInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkSpecializationInfo *>( this );
|
|
}
|
|
|
|
operator VkSpecializationInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkSpecializationInfo *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<uint32_t const &, const VULKAN_HPP_NAMESPACE::SpecializationMapEntry * const &, size_t const &, const void * const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( mapEntryCount, pMapEntries, dataSize, pData );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( SpecializationInfo const & ) const = default;
|
|
#else
|
|
bool operator==( SpecializationInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( mapEntryCount == rhs.mapEntryCount ) && ( pMapEntries == rhs.pMapEntries ) && ( dataSize == rhs.dataSize ) && ( pData == rhs.pData );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( SpecializationInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
uint32_t mapEntryCount = {};
|
|
const VULKAN_HPP_NAMESPACE::SpecializationMapEntry * pMapEntries = {};
|
|
size_t dataSize = {};
|
|
const void * pData = {};
|
|
};
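  // Illustrative sketch (not part of the generated header): SpecializationMapEntry and SpecializationInfo
  // describe how bytes in a caller-provided blob map onto SPIR-V specialization constants. The constant
  // IDs (0 and 1) and the values are assumptions for the example only; the data and entries must outlive
  // any pipeline creation that references them.
  //
  //   std::array<uint32_t, 2> values = { 64, 16 };
  //   std::array<vk::SpecializationMapEntry, 2> entries = {
  //     vk::SpecializationMapEntry( 0, 0, sizeof( uint32_t ) ),                   // constantID 0 <- values[0]
  //     vk::SpecializationMapEntry( 1, sizeof( uint32_t ), sizeof( uint32_t ) )   // constantID 1 <- values[1]
  //   };
  //
  //   vk::SpecializationInfo specInfo;
  //   specInfo.setMapEntries( entries ).setDataSize( sizeof( values ) ).setPData( values.data() );
  //   // point PipelineShaderStageCreateInfo::pSpecializationInfo at specInfo while it stays alive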
|
|
|
|
struct PipelineShaderStageCreateInfo
|
|
{
|
|
using NativeType = VkPipelineShaderStageCreateInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineShaderStageCreateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateInfo( VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlags flags_ = {},
|
|
VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage_ = VULKAN_HPP_NAMESPACE::ShaderStageFlagBits::eVertex,
|
|
VULKAN_HPP_NAMESPACE::ShaderModule module_ = {},
|
|
const char * pName_ = {},
|
|
const VULKAN_HPP_NAMESPACE::SpecializationInfo * pSpecializationInfo_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, flags( flags_ )
|
|
, stage( stage_ )
|
|
, module( module_ )
|
|
, pName( pName_ )
|
|
, pSpecializationInfo( pSpecializationInfo_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateInfo( PipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PipelineShaderStageCreateInfo( VkPipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PipelineShaderStageCreateInfo( *reinterpret_cast<PipelineShaderStageCreateInfo const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
PipelineShaderStageCreateInfo & operator=( PipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PipelineShaderStageCreateInfo & operator=( VkPipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageCreateInfo & setStage( VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
stage = stage_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageCreateInfo & setModule( VULKAN_HPP_NAMESPACE::ShaderModule module_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
module = module_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageCreateInfo & setPName( const char * pName_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pName = pName_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageCreateInfo &
|
|
setPSpecializationInfo( const VULKAN_HPP_NAMESPACE::SpecializationInfo * pSpecializationInfo_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pSpecializationInfo = pSpecializationInfo_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkPipelineShaderStageCreateInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPipelineShaderStageCreateInfo *>( this );
|
|
}
|
|
|
|
operator VkPipelineShaderStageCreateInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPipelineShaderStageCreateInfo *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
const void * const &,
|
|
VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlags const &,
|
|
VULKAN_HPP_NAMESPACE::ShaderStageFlagBits const &,
|
|
VULKAN_HPP_NAMESPACE::ShaderModule const &,
|
|
const char * const &,
|
|
const VULKAN_HPP_NAMESPACE::SpecializationInfo * const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags, stage, module, pName, pSpecializationInfo );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
std::strong_ordering operator<=>( PipelineShaderStageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
|
|
return cmp;
|
|
if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
|
|
return cmp;
|
|
if ( auto cmp = flags <=> rhs.flags; cmp != 0 )
|
|
return cmp;
|
|
if ( auto cmp = stage <=> rhs.stage; cmp != 0 )
|
|
return cmp;
|
|
if ( auto cmp = module <=> rhs.module; cmp != 0 )
|
|
return cmp;
|
|
if ( pName != rhs.pName )
|
|
if ( auto cmp = strcmp( pName, rhs.pName ); cmp != 0 )
|
|
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
|
|
if ( auto cmp = pSpecializationInfo <=> rhs.pSpecializationInfo; cmp != 0 )
|
|
return cmp;
|
|
|
|
return std::strong_ordering::equivalent;
|
|
}
|
|
#endif
|
|
|
|
bool operator==( PipelineShaderStageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( stage == rhs.stage ) && ( module == rhs.module ) &&
|
|
( ( pName == rhs.pName ) || ( strcmp( pName, rhs.pName ) == 0 ) ) && ( pSpecializationInfo == rhs.pSpecializationInfo );
|
|
}
|
|
|
|
bool operator!=( PipelineShaderStageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineShaderStageCreateInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlags flags = {};
|
|
VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage = VULKAN_HPP_NAMESPACE::ShaderStageFlagBits::eVertex;
|
|
VULKAN_HPP_NAMESPACE::ShaderModule module = {};
|
|
const char * pName = {};
|
|
const VULKAN_HPP_NAMESPACE::SpecializationInfo * pSpecializationInfo = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePipelineShaderStageCreateInfo>
|
|
{
|
|
using Type = PipelineShaderStageCreateInfo;
|
|
};
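  // Illustrative sketch (not part of the generated header): filling in a single shader stage. The
  // `vertexShaderModule` handle is assumed to have been created from SPIR-V elsewhere; "main" names
  // the entry point inside that module.
  //
  //   vk::PipelineShaderStageCreateInfo vertexStage{};
  //   vertexStage.setStage( vk::ShaderStageFlagBits::eVertex )
  //              .setModule( vertexShaderModule )
  //              .setPName( "main" );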
|
|
|
|
struct ComputePipelineCreateInfo
|
|
{
|
|
using NativeType = VkComputePipelineCreateInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eComputePipelineCreateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ComputePipelineCreateInfo( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {},
|
|
VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo stage_ = {},
|
|
VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {},
|
|
VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {},
|
|
int32_t basePipelineIndex_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, flags( flags_ )
|
|
, stage( stage_ )
|
|
, layout( layout_ )
|
|
, basePipelineHandle( basePipelineHandle_ )
|
|
, basePipelineIndex( basePipelineIndex_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR ComputePipelineCreateInfo( ComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ComputePipelineCreateInfo( VkComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ComputePipelineCreateInfo( *reinterpret_cast<ComputePipelineCreateInfo const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
ComputePipelineCreateInfo & operator=( ComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ComputePipelineCreateInfo & operator=( VkComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ComputePipelineCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ComputePipelineCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ComputePipelineCreateInfo & setStage( VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo const & stage_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
stage = stage_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ComputePipelineCreateInfo & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
layout = layout_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ComputePipelineCreateInfo & setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
basePipelineHandle = basePipelineHandle_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ComputePipelineCreateInfo & setBasePipelineIndex( int32_t basePipelineIndex_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
basePipelineIndex = basePipelineIndex_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkComputePipelineCreateInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkComputePipelineCreateInfo *>( this );
|
|
}
|
|
|
|
operator VkComputePipelineCreateInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkComputePipelineCreateInfo *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
const void * const &,
|
|
VULKAN_HPP_NAMESPACE::PipelineCreateFlags const &,
|
|
VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo const &,
|
|
VULKAN_HPP_NAMESPACE::PipelineLayout const &,
|
|
VULKAN_HPP_NAMESPACE::Pipeline const &,
|
|
int32_t const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags, stage, layout, basePipelineHandle, basePipelineIndex );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( ComputePipelineCreateInfo const & ) const = default;
|
|
#else
|
|
bool operator==( ComputePipelineCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( stage == rhs.stage ) && ( layout == rhs.layout ) &&
|
|
( basePipelineHandle == rhs.basePipelineHandle ) && ( basePipelineIndex == rhs.basePipelineIndex );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( ComputePipelineCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eComputePipelineCreateInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags = {};
|
|
VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo stage = {};
|
|
VULKAN_HPP_NAMESPACE::PipelineLayout layout = {};
|
|
VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {};
|
|
int32_t basePipelineIndex = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eComputePipelineCreateInfo>
|
|
{
|
|
using Type = ComputePipelineCreateInfo;
|
|
};
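  // Illustrative sketch (not part of the generated header): creating a compute pipeline from a single
  // compute stage. `device`, `computeShaderModule` and `pipelineLayout` are assumed handles; how the
  // result code is handled depends on whether exceptions are enabled in the configuration being used.
  //
  //   vk::PipelineShaderStageCreateInfo stage{};
  //   stage.setStage( vk::ShaderStageFlagBits::eCompute ).setModule( computeShaderModule ).setPName( "main" );
  //
  //   vk::ComputePipelineCreateInfo pipelineInfo{};
  //   pipelineInfo.setStage( stage ).setLayout( pipelineLayout );
  //
  //   vk::Pipeline pipeline = device.createComputePipeline( nullptr, pipelineInfo ).value;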
|
|
|
|
struct ConformanceVersion
|
|
{
|
|
using NativeType = VkConformanceVersion;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ConformanceVersion( uint8_t major_ = {}, uint8_t minor_ = {}, uint8_t subminor_ = {}, uint8_t patch_ = {} ) VULKAN_HPP_NOEXCEPT
|
|
: major( major_ )
|
|
, minor( minor_ )
|
|
, subminor( subminor_ )
|
|
, patch( patch_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR ConformanceVersion( ConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ConformanceVersion( VkConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT : ConformanceVersion( *reinterpret_cast<ConformanceVersion const *>( &rhs ) ) {}
|
|
|
|
ConformanceVersion & operator=( ConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ConformanceVersion & operator=( VkConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ConformanceVersion const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ConformanceVersion & setMajor( uint8_t major_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
major = major_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ConformanceVersion & setMinor( uint8_t minor_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
minor = minor_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ConformanceVersion & setSubminor( uint8_t subminor_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
subminor = subminor_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ConformanceVersion & setPatch( uint8_t patch_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
patch = patch_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkConformanceVersion const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkConformanceVersion *>( this );
|
|
}
|
|
|
|
operator VkConformanceVersion &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkConformanceVersion *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<uint8_t const &, uint8_t const &, uint8_t const &, uint8_t const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( major, minor, subminor, patch );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( ConformanceVersion const & ) const = default;
|
|
#else
|
|
bool operator==( ConformanceVersion const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( major == rhs.major ) && ( minor == rhs.minor ) && ( subminor == rhs.subminor ) && ( patch == rhs.patch );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( ConformanceVersion const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
uint8_t major = {};
|
|
uint8_t minor = {};
|
|
uint8_t subminor = {};
|
|
uint8_t patch = {};
|
|
};
|
|
|
|
using ConformanceVersionKHR = ConformanceVersion;
|
|
|
|
struct CopyBufferInfo2
|
|
{
|
|
using NativeType = VkCopyBufferInfo2;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyBufferInfo2;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR CopyBufferInfo2( VULKAN_HPP_NAMESPACE::Buffer srcBuffer_ = {},
|
|
VULKAN_HPP_NAMESPACE::Buffer dstBuffer_ = {},
|
|
uint32_t regionCount_ = {},
|
|
const VULKAN_HPP_NAMESPACE::BufferCopy2 * pRegions_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, srcBuffer( srcBuffer_ )
|
|
, dstBuffer( dstBuffer_ )
|
|
, regionCount( regionCount_ )
|
|
, pRegions( pRegions_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR CopyBufferInfo2( CopyBufferInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
CopyBufferInfo2( VkCopyBufferInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT : CopyBufferInfo2( *reinterpret_cast<CopyBufferInfo2 const *>( &rhs ) ) {}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
CopyBufferInfo2( VULKAN_HPP_NAMESPACE::Buffer srcBuffer_,
|
|
VULKAN_HPP_NAMESPACE::Buffer dstBuffer_,
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferCopy2> const & regions_,
|
|
const void * pNext_ = nullptr )
|
|
: pNext( pNext_ ), srcBuffer( srcBuffer_ ), dstBuffer( dstBuffer_ ), regionCount( static_cast<uint32_t>( regions_.size() ) ), pRegions( regions_.data() )
|
|
{
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
CopyBufferInfo2 & operator=( CopyBufferInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
CopyBufferInfo2 & operator=( VkCopyBufferInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyBufferInfo2 const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 CopyBufferInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CopyBufferInfo2 & setSrcBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
srcBuffer = srcBuffer_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CopyBufferInfo2 & setDstBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dstBuffer = dstBuffer_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CopyBufferInfo2 & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
regionCount = regionCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CopyBufferInfo2 & setPRegions( const VULKAN_HPP_NAMESPACE::BufferCopy2 * pRegions_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pRegions = pRegions_;
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
CopyBufferInfo2 & setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferCopy2> const & regions_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
regionCount = static_cast<uint32_t>( regions_.size() );
|
|
pRegions = regions_.data();
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkCopyBufferInfo2 const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkCopyBufferInfo2 *>( this );
|
|
}
|
|
|
|
operator VkCopyBufferInfo2 &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkCopyBufferInfo2 *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
const void * const &,
|
|
VULKAN_HPP_NAMESPACE::Buffer const &,
|
|
VULKAN_HPP_NAMESPACE::Buffer const &,
|
|
uint32_t const &,
|
|
const VULKAN_HPP_NAMESPACE::BufferCopy2 * const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, srcBuffer, dstBuffer, regionCount, pRegions );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( CopyBufferInfo2 const & ) const = default;
|
|
#else
|
|
bool operator==( CopyBufferInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcBuffer == rhs.srcBuffer ) && ( dstBuffer == rhs.dstBuffer ) &&
|
|
( regionCount == rhs.regionCount ) && ( pRegions == rhs.pRegions );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( CopyBufferInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyBufferInfo2;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Buffer srcBuffer = {};
|
|
VULKAN_HPP_NAMESPACE::Buffer dstBuffer = {};
|
|
uint32_t regionCount = {};
|
|
const VULKAN_HPP_NAMESPACE::BufferCopy2 * pRegions = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eCopyBufferInfo2>
|
|
{
|
|
using Type = CopyBufferInfo2;
|
|
};
|
|
|
|
using CopyBufferInfo2KHR = CopyBufferInfo2;
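  // Illustrative sketch (not part of the generated header): recording a buffer-to-buffer copy with the
  // "2" form of the command. `cmd`, `stagingBuffer`, `deviceLocalBuffer` and `byteCount` are assumptions
  // for the example; without Vulkan 1.3 the equivalent copyBuffer2KHR entry point would be used instead.
  //
  //   vk::BufferCopy2 region( 0 /*srcOffset*/, 0 /*dstOffset*/, byteCount );
  //   vk::CopyBufferInfo2 copyInfo( stagingBuffer, deviceLocalBuffer, region );
  //   cmd.copyBuffer2( copyInfo );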
|
|
|
|
struct CopyBufferToImageInfo2
|
|
{
|
|
using NativeType = VkCopyBufferToImageInfo2;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyBufferToImageInfo2;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR CopyBufferToImageInfo2( VULKAN_HPP_NAMESPACE::Buffer srcBuffer_ = {},
|
|
VULKAN_HPP_NAMESPACE::Image dstImage_ = {},
|
|
VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
|
|
uint32_t regionCount_ = {},
|
|
const VULKAN_HPP_NAMESPACE::BufferImageCopy2 * pRegions_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, srcBuffer( srcBuffer_ )
|
|
, dstImage( dstImage_ )
|
|
, dstImageLayout( dstImageLayout_ )
|
|
, regionCount( regionCount_ )
|
|
, pRegions( pRegions_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR CopyBufferToImageInfo2( CopyBufferToImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
CopyBufferToImageInfo2( VkCopyBufferToImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: CopyBufferToImageInfo2( *reinterpret_cast<CopyBufferToImageInfo2 const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
CopyBufferToImageInfo2( VULKAN_HPP_NAMESPACE::Buffer srcBuffer_,
|
|
VULKAN_HPP_NAMESPACE::Image dstImage_,
|
|
VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_,
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferImageCopy2> const & regions_,
|
|
const void * pNext_ = nullptr )
|
|
: pNext( pNext_ )
|
|
, srcBuffer( srcBuffer_ )
|
|
, dstImage( dstImage_ )
|
|
, dstImageLayout( dstImageLayout_ )
|
|
, regionCount( static_cast<uint32_t>( regions_.size() ) )
|
|
, pRegions( regions_.data() )
|
|
{
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
CopyBufferToImageInfo2 & operator=( CopyBufferToImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
CopyBufferToImageInfo2 & operator=( VkCopyBufferToImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 CopyBufferToImageInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CopyBufferToImageInfo2 & setSrcBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
srcBuffer = srcBuffer_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CopyBufferToImageInfo2 & setDstImage( VULKAN_HPP_NAMESPACE::Image dstImage_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dstImage = dstImage_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CopyBufferToImageInfo2 & setDstImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dstImageLayout = dstImageLayout_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CopyBufferToImageInfo2 & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
regionCount = regionCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CopyBufferToImageInfo2 & setPRegions( const VULKAN_HPP_NAMESPACE::BufferImageCopy2 * pRegions_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pRegions = pRegions_;
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
CopyBufferToImageInfo2 &
|
|
setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferImageCopy2> const & regions_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
regionCount = static_cast<uint32_t>( regions_.size() );
|
|
pRegions = regions_.data();
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkCopyBufferToImageInfo2 const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkCopyBufferToImageInfo2 *>( this );
|
|
}
|
|
|
|
operator VkCopyBufferToImageInfo2 &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkCopyBufferToImageInfo2 *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
const void * const &,
|
|
VULKAN_HPP_NAMESPACE::Buffer const &,
|
|
VULKAN_HPP_NAMESPACE::Image const &,
|
|
VULKAN_HPP_NAMESPACE::ImageLayout const &,
|
|
uint32_t const &,
|
|
const VULKAN_HPP_NAMESPACE::BufferImageCopy2 * const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, srcBuffer, dstImage, dstImageLayout, regionCount, pRegions );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( CopyBufferToImageInfo2 const & ) const = default;
|
|
#else
|
|
bool operator==( CopyBufferToImageInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcBuffer == rhs.srcBuffer ) && ( dstImage == rhs.dstImage ) &&
|
|
( dstImageLayout == rhs.dstImageLayout ) && ( regionCount == rhs.regionCount ) && ( pRegions == rhs.pRegions );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( CopyBufferToImageInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyBufferToImageInfo2;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Buffer srcBuffer = {};
|
|
VULKAN_HPP_NAMESPACE::Image dstImage = {};
|
|
VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
|
|
uint32_t regionCount = {};
|
|
const VULKAN_HPP_NAMESPACE::BufferImageCopy2 * pRegions = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eCopyBufferToImageInfo2>
|
|
{
|
|
using Type = CopyBufferToImageInfo2;
|
|
};
|
|
|
|
using CopyBufferToImageInfo2KHR = CopyBufferToImageInfo2;
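  // Illustrative sketch (not part of the generated header): a typical staging upload recorded with the
  // "2" form of the copy command. `cmd`, `stagingBuffer`, `textureImage`, `width` and `height` are
  // assumptions for the example, and the image is assumed to already be in eTransferDstOptimal layout.
  //
  //   vk::BufferImageCopy2 region{};
  //   region.setImageSubresource( { vk::ImageAspectFlagBits::eColor, 0, 0, 1 } )
  //         .setImageExtent( { width, height, 1 } );
  //
  //   vk::CopyBufferToImageInfo2 copyInfo( stagingBuffer, textureImage, vk::ImageLayout::eTransferDstOptimal, region );
  //   cmd.copyBufferToImage2( copyInfo );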
|
|
|
|
struct CopyDescriptorSet
|
|
{
|
|
using NativeType = VkCopyDescriptorSet;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyDescriptorSet;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR CopyDescriptorSet( VULKAN_HPP_NAMESPACE::DescriptorSet srcSet_ = {},
|
|
uint32_t srcBinding_ = {},
|
|
uint32_t srcArrayElement_ = {},
|
|
VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ = {},
|
|
uint32_t dstBinding_ = {},
|
|
uint32_t dstArrayElement_ = {},
|
|
uint32_t descriptorCount_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, srcSet( srcSet_ )
|
|
, srcBinding( srcBinding_ )
|
|
, srcArrayElement( srcArrayElement_ )
|
|
, dstSet( dstSet_ )
|
|
, dstBinding( dstBinding_ )
|
|
, dstArrayElement( dstArrayElement_ )
|
|
, descriptorCount( descriptorCount_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR CopyDescriptorSet( CopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
CopyDescriptorSet( VkCopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT : CopyDescriptorSet( *reinterpret_cast<CopyDescriptorSet const *>( &rhs ) ) {}
|
|
|
|
CopyDescriptorSet & operator=( CopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
CopyDescriptorSet & operator=( VkCopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyDescriptorSet const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & setSrcSet( VULKAN_HPP_NAMESPACE::DescriptorSet srcSet_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
srcSet = srcSet_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & setSrcBinding( uint32_t srcBinding_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
srcBinding = srcBinding_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & setSrcArrayElement( uint32_t srcArrayElement_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
srcArrayElement = srcArrayElement_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & setDstSet( VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dstSet = dstSet_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & setDstBinding( uint32_t dstBinding_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dstBinding = dstBinding_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & setDstArrayElement( uint32_t dstArrayElement_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dstArrayElement = dstArrayElement_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
descriptorCount = descriptorCount_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkCopyDescriptorSet const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkCopyDescriptorSet *>( this );
|
|
}
|
|
|
|
operator VkCopyDescriptorSet &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkCopyDescriptorSet *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
const void * const &,
|
|
VULKAN_HPP_NAMESPACE::DescriptorSet const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
VULKAN_HPP_NAMESPACE::DescriptorSet const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, srcSet, srcBinding, srcArrayElement, dstSet, dstBinding, dstArrayElement, descriptorCount );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( CopyDescriptorSet const & ) const = default;
|
|
#else
|
|
bool operator==( CopyDescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcSet == rhs.srcSet ) && ( srcBinding == rhs.srcBinding ) &&
|
|
( srcArrayElement == rhs.srcArrayElement ) && ( dstSet == rhs.dstSet ) && ( dstBinding == rhs.dstBinding ) &&
|
|
( dstArrayElement == rhs.dstArrayElement ) && ( descriptorCount == rhs.descriptorCount );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( CopyDescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyDescriptorSet;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::DescriptorSet srcSet = {};
|
|
uint32_t srcBinding = {};
|
|
uint32_t srcArrayElement = {};
|
|
VULKAN_HPP_NAMESPACE::DescriptorSet dstSet = {};
|
|
uint32_t dstBinding = {};
|
|
uint32_t dstArrayElement = {};
|
|
uint32_t descriptorCount = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eCopyDescriptorSet>
|
|
{
|
|
using Type = CopyDescriptorSet;
|
|
};
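  // Illustrative sketch (not part of the generated header): copying one descriptor from an existing set
  // into a freshly allocated one instead of re-writing it. `device`, `srcSet` and `dstSet` are assumed
  // handles for the example only.
  //
  //   vk::CopyDescriptorSet copy{};
  //   copy.setSrcSet( srcSet ).setSrcBinding( 0 ).setDstSet( dstSet ).setDstBinding( 0 ).setDescriptorCount( 1 );
  //   device.updateDescriptorSets( nullptr, copy );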
|
|
|
|
struct ImageCopy2
|
|
{
|
|
using NativeType = VkImageCopy2;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageCopy2;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ImageCopy2( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {},
|
|
VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ = {},
|
|
VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {},
|
|
VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {},
|
|
VULKAN_HPP_NAMESPACE::Extent3D extent_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, srcSubresource( srcSubresource_ )
|
|
, srcOffset( srcOffset_ )
|
|
, dstSubresource( dstSubresource_ )
|
|
, dstOffset( dstOffset_ )
|
|
, extent( extent_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR ImageCopy2( ImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImageCopy2( VkImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT : ImageCopy2( *reinterpret_cast<ImageCopy2 const *>( &rhs ) ) {}
|
|
|
|
ImageCopy2 & operator=( ImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ImageCopy2 & operator=( VkImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageCopy2 const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ImageCopy2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageCopy2 & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
srcSubresource = srcSubresource_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageCopy2 & setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D const & srcOffset_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
srcOffset = srcOffset_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageCopy2 & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dstSubresource = dstSubresource_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageCopy2 & setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D const & dstOffset_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dstOffset = dstOffset_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageCopy2 & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
extent = extent_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkImageCopy2 const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkImageCopy2 *>( this );
|
|
}
|
|
|
|
operator VkImageCopy2 &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkImageCopy2 *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
const void * const &,
|
|
VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const &,
|
|
VULKAN_HPP_NAMESPACE::Offset3D const &,
|
|
VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const &,
|
|
VULKAN_HPP_NAMESPACE::Offset3D const &,
|
|
VULKAN_HPP_NAMESPACE::Extent3D const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, srcSubresource, srcOffset, dstSubresource, dstOffset, extent );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( ImageCopy2 const & ) const = default;
|
|
#else
|
|
bool operator==( ImageCopy2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcSubresource == rhs.srcSubresource ) && ( srcOffset == rhs.srcOffset ) &&
|
|
( dstSubresource == rhs.dstSubresource ) && ( dstOffset == rhs.dstOffset ) && ( extent == rhs.extent );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( ImageCopy2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageCopy2;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {};
|
|
VULKAN_HPP_NAMESPACE::Offset3D srcOffset = {};
|
|
VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {};
|
|
VULKAN_HPP_NAMESPACE::Offset3D dstOffset = {};
|
|
VULKAN_HPP_NAMESPACE::Extent3D extent = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eImageCopy2>
|
|
{
|
|
using Type = ImageCopy2;
|
|
};
|
|
|
|
using ImageCopy2KHR = ImageCopy2;
|
|
|
|
struct CopyImageInfo2
|
|
{
|
|
using NativeType = VkCopyImageInfo2;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyImageInfo2;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR CopyImageInfo2( VULKAN_HPP_NAMESPACE::Image srcImage_ = {},
|
|
VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
|
|
VULKAN_HPP_NAMESPACE::Image dstImage_ = {},
|
|
VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
|
|
uint32_t regionCount_ = {},
|
|
const VULKAN_HPP_NAMESPACE::ImageCopy2 * pRegions_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, srcImage( srcImage_ )
|
|
, srcImageLayout( srcImageLayout_ )
|
|
, dstImage( dstImage_ )
|
|
, dstImageLayout( dstImageLayout_ )
|
|
, regionCount( regionCount_ )
|
|
, pRegions( pRegions_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR CopyImageInfo2( CopyImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
CopyImageInfo2( VkCopyImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT : CopyImageInfo2( *reinterpret_cast<CopyImageInfo2 const *>( &rhs ) ) {}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
CopyImageInfo2( VULKAN_HPP_NAMESPACE::Image srcImage_,
|
|
VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_,
|
|
VULKAN_HPP_NAMESPACE::Image dstImage_,
|
|
VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_,
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageCopy2> const & regions_,
|
|
const void * pNext_ = nullptr )
|
|
: pNext( pNext_ )
|
|
, srcImage( srcImage_ )
|
|
, srcImageLayout( srcImageLayout_ )
|
|
, dstImage( dstImage_ )
|
|
, dstImageLayout( dstImageLayout_ )
|
|
, regionCount( static_cast<uint32_t>( regions_.size() ) )
|
|
, pRegions( regions_.data() )
|
|
{
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
CopyImageInfo2 & operator=( CopyImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
CopyImageInfo2 & operator=( VkCopyImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyImageInfo2 const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2 & setSrcImage( VULKAN_HPP_NAMESPACE::Image srcImage_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
srcImage = srcImage_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2 & setSrcImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
srcImageLayout = srcImageLayout_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2 & setDstImage( VULKAN_HPP_NAMESPACE::Image dstImage_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dstImage = dstImage_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2 & setDstImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dstImageLayout = dstImageLayout_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2 & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
regionCount = regionCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2 & setPRegions( const VULKAN_HPP_NAMESPACE::ImageCopy2 * pRegions_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pRegions = pRegions_;
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
CopyImageInfo2 & setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageCopy2> const & regions_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
regionCount = static_cast<uint32_t>( regions_.size() );
|
|
pRegions = regions_.data();
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkCopyImageInfo2 const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkCopyImageInfo2 *>( this );
|
|
}
|
|
|
|
operator VkCopyImageInfo2 &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkCopyImageInfo2 *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
const void * const &,
|
|
VULKAN_HPP_NAMESPACE::Image const &,
|
|
VULKAN_HPP_NAMESPACE::ImageLayout const &,
|
|
VULKAN_HPP_NAMESPACE::Image const &,
|
|
VULKAN_HPP_NAMESPACE::ImageLayout const &,
|
|
uint32_t const &,
|
|
const VULKAN_HPP_NAMESPACE::ImageCopy2 * const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( CopyImageInfo2 const & ) const = default;
|
|
#else
|
|
bool operator==( CopyImageInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcImage == rhs.srcImage ) && ( srcImageLayout == rhs.srcImageLayout ) &&
|
|
( dstImage == rhs.dstImage ) && ( dstImageLayout == rhs.dstImageLayout ) && ( regionCount == rhs.regionCount ) && ( pRegions == rhs.pRegions );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( CopyImageInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyImageInfo2;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Image srcImage = {};
|
|
VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
|
|
VULKAN_HPP_NAMESPACE::Image dstImage = {};
|
|
VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
|
|
uint32_t regionCount = {};
|
|
const VULKAN_HPP_NAMESPACE::ImageCopy2 * pRegions = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eCopyImageInfo2>
|
|
{
|
|
using Type = CopyImageInfo2;
|
|
};
|
|
|
|
using CopyImageInfo2KHR = CopyImageInfo2;
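  // Illustrative sketch (not part of the generated header): a full-size image-to-image copy between two
  // images of identical format and extent, both assumed to already be in the transfer layouts given.
  // `cmd`, `srcImage`, `dstImage`, `width` and `height` are assumptions for the example only.
  //
  //   vk::ImageCopy2 region{};
  //   region.setSrcSubresource( { vk::ImageAspectFlagBits::eColor, 0, 0, 1 } )
  //         .setDstSubresource( { vk::ImageAspectFlagBits::eColor, 0, 0, 1 } )
  //         .setExtent( { width, height, 1 } );
  //
  //   vk::CopyImageInfo2 copyInfo( srcImage, vk::ImageLayout::eTransferSrcOptimal,
  //                                dstImage, vk::ImageLayout::eTransferDstOptimal, region );
  //   cmd.copyImage2( copyInfo );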
|
|
|
|
struct CopyImageToBufferInfo2
|
|
{
|
|
using NativeType = VkCopyImageToBufferInfo2;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyImageToBufferInfo2;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR CopyImageToBufferInfo2( VULKAN_HPP_NAMESPACE::Image srcImage_ = {},
|
|
VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
|
|
VULKAN_HPP_NAMESPACE::Buffer dstBuffer_ = {},
|
|
uint32_t regionCount_ = {},
|
|
const VULKAN_HPP_NAMESPACE::BufferImageCopy2 * pRegions_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, srcImage( srcImage_ )
|
|
, srcImageLayout( srcImageLayout_ )
|
|
, dstBuffer( dstBuffer_ )
|
|
, regionCount( regionCount_ )
|
|
, pRegions( pRegions_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR CopyImageToBufferInfo2( CopyImageToBufferInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
CopyImageToBufferInfo2( VkCopyImageToBufferInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: CopyImageToBufferInfo2( *reinterpret_cast<CopyImageToBufferInfo2 const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
CopyImageToBufferInfo2( VULKAN_HPP_NAMESPACE::Image srcImage_,
|
|
VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_,
|
|
VULKAN_HPP_NAMESPACE::Buffer dstBuffer_,
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferImageCopy2> const & regions_,
|
|
const void * pNext_ = nullptr )
|
|
: pNext( pNext_ )
|
|
, srcImage( srcImage_ )
|
|
, srcImageLayout( srcImageLayout_ )
|
|
, dstBuffer( dstBuffer_ )
|
|
, regionCount( static_cast<uint32_t>( regions_.size() ) )
|
|
, pRegions( regions_.data() )
|
|
{
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
CopyImageToBufferInfo2 & operator=( CopyImageToBufferInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
CopyImageToBufferInfo2 & operator=( VkCopyImageToBufferInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 CopyImageToBufferInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CopyImageToBufferInfo2 & setSrcImage( VULKAN_HPP_NAMESPACE::Image srcImage_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
srcImage = srcImage_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CopyImageToBufferInfo2 & setSrcImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
srcImageLayout = srcImageLayout_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CopyImageToBufferInfo2 & setDstBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dstBuffer = dstBuffer_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CopyImageToBufferInfo2 & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
regionCount = regionCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CopyImageToBufferInfo2 & setPRegions( const VULKAN_HPP_NAMESPACE::BufferImageCopy2 * pRegions_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pRegions = pRegions_;
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
CopyImageToBufferInfo2 &
|
|
setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferImageCopy2> const & regions_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
regionCount = static_cast<uint32_t>( regions_.size() );
|
|
pRegions = regions_.data();
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkCopyImageToBufferInfo2 const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkCopyImageToBufferInfo2 *>( this );
|
|
}
|
|
|
|
operator VkCopyImageToBufferInfo2 &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkCopyImageToBufferInfo2 *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
const void * const &,
|
|
VULKAN_HPP_NAMESPACE::Image const &,
|
|
VULKAN_HPP_NAMESPACE::ImageLayout const &,
|
|
VULKAN_HPP_NAMESPACE::Buffer const &,
|
|
uint32_t const &,
|
|
const VULKAN_HPP_NAMESPACE::BufferImageCopy2 * const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, srcImage, srcImageLayout, dstBuffer, regionCount, pRegions );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( CopyImageToBufferInfo2 const & ) const = default;
|
|
#else
|
|
bool operator==( CopyImageToBufferInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcImage == rhs.srcImage ) && ( srcImageLayout == rhs.srcImageLayout ) &&
|
|
( dstBuffer == rhs.dstBuffer ) && ( regionCount == rhs.regionCount ) && ( pRegions == rhs.pRegions );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( CopyImageToBufferInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyImageToBufferInfo2;
    const void * pNext = {};
    VULKAN_HPP_NAMESPACE::Image srcImage = {};
    VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
    VULKAN_HPP_NAMESPACE::Buffer dstBuffer = {};
    uint32_t regionCount = {};
    const VULKAN_HPP_NAMESPACE::BufferImageCopy2 * pRegions = {};
  };

  template <>
  struct CppType<StructureType, StructureType::eCopyImageToBufferInfo2>
  {
    using Type = CopyImageToBufferInfo2;
  };

  using CopyImageToBufferInfo2KHR = CopyImageToBufferInfo2;
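
  // A minimal usage sketch for CopyImageToBufferInfo2 with CommandBuffer::copyImageToBuffer2.
  // `cmd`, `image` and `readbackBuffer` are assumed to be valid handles and `region` a filled
  // vk::BufferImageCopy2; all of these names are illustrative assumptions.
  //
  //   vk::CopyImageToBufferInfo2 info( image, vk::ImageLayout::eTransferSrcOptimal, readbackBuffer, 1, &region );
  //   cmd.copyImageToBuffer2( info );
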
struct DebugUtilsLabelEXT
{
|
|
using NativeType = VkDebugUtilsLabelEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsLabelEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR_14
|
|
DebugUtilsLabelEXT( const char * pLabelName_ = {}, std::array<float, 4> const & color_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, pLabelName( pLabelName_ )
|
|
, color( color_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT( DebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DebugUtilsLabelEXT( VkDebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT : DebugUtilsLabelEXT( *reinterpret_cast<DebugUtilsLabelEXT const *>( &rhs ) ) {}
|
|
|
|
DebugUtilsLabelEXT & operator=( DebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DebugUtilsLabelEXT & operator=( VkDebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT & setPLabelName( const char * pLabelName_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pLabelName = pLabelName_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT & setColor( std::array<float, 4> color_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
color = color_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkDebugUtilsLabelEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDebugUtilsLabelEXT *>( this );
|
|
}
|
|
|
|
operator VkDebugUtilsLabelEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDebugUtilsLabelEXT *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const char * const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 4> const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, pLabelName, color );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    std::partial_ordering operator<=>( DebugUtilsLabelEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
        return cmp;
      if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
        return cmp;
      if ( pLabelName != rhs.pLabelName )
        if ( auto cmp = strcmp( pLabelName, rhs.pLabelName ); cmp != 0 )
          return ( cmp < 0 ) ? std::partial_ordering::less : std::partial_ordering::greater;
      if ( auto cmp = color <=> rhs.color; cmp != 0 )
        return cmp;

      return std::partial_ordering::equivalent;
    }
#endif

bool operator==( DebugUtilsLabelEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( ( pLabelName == rhs.pLabelName ) || ( strcmp( pLabelName, rhs.pLabelName ) == 0 ) ) &&
|
|
( color == rhs.color );
|
|
}
|
|
|
|
bool operator!=( DebugUtilsLabelEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
|
|
  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsLabelEXT;
    const void * pNext = {};
    const char * pLabelName = {};
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 4> color = {};
  };

  template <>
  struct CppType<StructureType, StructureType::eDebugUtilsLabelEXT>
  {
    using Type = DebugUtilsLabelEXT;
  };
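
  // A minimal usage sketch for DebugUtilsLabelEXT: labels bracket command buffer or queue work
  // for debuggers and capture tools. `cmd` is an assumed vk::CommandBuffer, and the
  // VK_EXT_debug_utils entry points are assumed to be loaded into the dispatcher in use.
  //
  //   vk::DebugUtilsLabelEXT label( "shadow pass", { { 0.2f, 0.4f, 0.8f, 1.0f } } );
  //   cmd.beginDebugUtilsLabelEXT( label );
  //   // ... record the pass ...
  //   cmd.endDebugUtilsLabelEXT();
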
struct DebugUtilsObjectNameInfoEXT
{
|
|
using NativeType = VkDebugUtilsObjectNameInfoEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsObjectNameInfoEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DebugUtilsObjectNameInfoEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_ = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown,
|
|
uint64_t objectHandle_ = {},
|
|
const char * pObjectName_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, objectType( objectType_ )
|
|
, objectHandle( objectHandle_ )
|
|
, pObjectName( pObjectName_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR DebugUtilsObjectNameInfoEXT( DebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DebugUtilsObjectNameInfoEXT( VkDebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DebugUtilsObjectNameInfoEXT( *reinterpret_cast<DebugUtilsObjectNameInfoEXT const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
DebugUtilsObjectNameInfoEXT & operator=( DebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DebugUtilsObjectNameInfoEXT & operator=( VkDebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectNameInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectNameInfoEXT & setObjectType( VULKAN_HPP_NAMESPACE::ObjectType objectType_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
objectType = objectType_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectNameInfoEXT & setObjectHandle( uint64_t objectHandle_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
objectHandle = objectHandle_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectNameInfoEXT & setPObjectName( const char * pObjectName_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pObjectName = pObjectName_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkDebugUtilsObjectNameInfoEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDebugUtilsObjectNameInfoEXT *>( this );
|
|
}
|
|
|
|
operator VkDebugUtilsObjectNameInfoEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDebugUtilsObjectNameInfoEXT *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::
|
|
tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ObjectType const &, uint64_t const &, const char * const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, objectType, objectHandle, pObjectName );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
std::strong_ordering operator<=>( DebugUtilsObjectNameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
|
|
return cmp;
|
|
if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
|
|
return cmp;
|
|
if ( auto cmp = objectType <=> rhs.objectType; cmp != 0 )
|
|
return cmp;
|
|
if ( auto cmp = objectHandle <=> rhs.objectHandle; cmp != 0 )
|
|
return cmp;
|
|
if ( pObjectName != rhs.pObjectName )
|
|
if ( auto cmp = strcmp( pObjectName, rhs.pObjectName ); cmp != 0 )
|
|
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
|
|
|
|
return std::strong_ordering::equivalent;
|
|
}
|
|
#endif
|
|
|
|
bool operator==( DebugUtilsObjectNameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( objectType == rhs.objectType ) && ( objectHandle == rhs.objectHandle ) &&
|
|
( ( pObjectName == rhs.pObjectName ) || ( strcmp( pObjectName, rhs.pObjectName ) == 0 ) );
|
|
}
|
|
|
|
bool operator!=( DebugUtilsObjectNameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
|
|
  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsObjectNameInfoEXT;
    const void * pNext = {};
    VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown;
    uint64_t objectHandle = {};
    const char * pObjectName = {};
  };

  template <>
  struct CppType<StructureType, StructureType::eDebugUtilsObjectNameInfoEXT>
  {
    using Type = DebugUtilsObjectNameInfoEXT;
  };
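
  // A minimal usage sketch for DebugUtilsObjectNameInfoEXT: it attaches a human-readable name to
  // any Vulkan handle for validation messages and captures. `device` and `vertexBuffer` are
  // assumed valid handles; the cast to uint64_t is needed because objectHandle is type-erased.
  //
  //   device.setDebugUtilsObjectNameEXT( vk::DebugUtilsObjectNameInfoEXT(
  //     vk::ObjectType::eBuffer, uint64_t( static_cast<VkBuffer>( vertexBuffer ) ), "vertex buffer" ) );
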
struct DebugUtilsMessengerCallbackDataEXT
|
|
{
|
|
using NativeType = VkDebugUtilsMessengerCallbackDataEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsMessengerCallbackDataEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags_ = {},
|
|
const char * pMessageIdName_ = {},
|
|
int32_t messageIdNumber_ = {},
|
|
const char * pMessage_ = {},
|
|
uint32_t queueLabelCount_ = {},
|
|
const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pQueueLabels_ = {},
|
|
uint32_t cmdBufLabelCount_ = {},
|
|
const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pCmdBufLabels_ = {},
|
|
uint32_t objectCount_ = {},
|
|
const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT * pObjects_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, flags( flags_ )
|
|
, pMessageIdName( pMessageIdName_ )
|
|
, messageIdNumber( messageIdNumber_ )
|
|
, pMessage( pMessage_ )
|
|
, queueLabelCount( queueLabelCount_ )
|
|
, pQueueLabels( pQueueLabels_ )
|
|
, cmdBufLabelCount( cmdBufLabelCount_ )
|
|
, pCmdBufLabels( pCmdBufLabels_ )
|
|
, objectCount( objectCount_ )
|
|
, pObjects( pObjects_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT( DebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DebugUtilsMessengerCallbackDataEXT( VkDebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DebugUtilsMessengerCallbackDataEXT( *reinterpret_cast<DebugUtilsMessengerCallbackDataEXT const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
DebugUtilsMessengerCallbackDataEXT(
|
|
VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags_,
|
|
const char * pMessageIdName_,
|
|
int32_t messageIdNumber_,
|
|
const char * pMessage_,
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT> const & queueLabels_,
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT> const & cmdBufLabels_ = {},
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT> const & objects_ = {},
|
|
const void * pNext_ = nullptr )
|
|
: pNext( pNext_ )
|
|
, flags( flags_ )
|
|
, pMessageIdName( pMessageIdName_ )
|
|
, messageIdNumber( messageIdNumber_ )
|
|
, pMessage( pMessage_ )
|
|
, queueLabelCount( static_cast<uint32_t>( queueLabels_.size() ) )
|
|
, pQueueLabels( queueLabels_.data() )
|
|
, cmdBufLabelCount( static_cast<uint32_t>( cmdBufLabels_.size() ) )
|
|
, pCmdBufLabels( cmdBufLabels_.data() )
|
|
, objectCount( static_cast<uint32_t>( objects_.size() ) )
|
|
, pObjects( objects_.data() )
|
|
{
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
DebugUtilsMessengerCallbackDataEXT & operator=( DebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DebugUtilsMessengerCallbackDataEXT & operator=( VkDebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT &
|
|
setFlags( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setPMessageIdName( const char * pMessageIdName_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pMessageIdName = pMessageIdName_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setMessageIdNumber( int32_t messageIdNumber_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
messageIdNumber = messageIdNumber_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setPMessage( const char * pMessage_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pMessage = pMessage_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setQueueLabelCount( uint32_t queueLabelCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
queueLabelCount = queueLabelCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT &
|
|
setPQueueLabels( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pQueueLabels_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pQueueLabels = pQueueLabels_;
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
DebugUtilsMessengerCallbackDataEXT &
|
|
setQueueLabels( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT> const & queueLabels_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
queueLabelCount = static_cast<uint32_t>( queueLabels_.size() );
|
|
pQueueLabels = queueLabels_.data();
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setCmdBufLabelCount( uint32_t cmdBufLabelCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
cmdBufLabelCount = cmdBufLabelCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT &
|
|
setPCmdBufLabels( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pCmdBufLabels_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pCmdBufLabels = pCmdBufLabels_;
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
DebugUtilsMessengerCallbackDataEXT &
|
|
setCmdBufLabels( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT> const & cmdBufLabels_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
cmdBufLabelCount = static_cast<uint32_t>( cmdBufLabels_.size() );
|
|
pCmdBufLabels = cmdBufLabels_.data();
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setObjectCount( uint32_t objectCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
objectCount = objectCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT &
|
|
setPObjects( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT * pObjects_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pObjects = pObjects_;
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
DebugUtilsMessengerCallbackDataEXT &
|
|
setObjects( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT> const & objects_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
objectCount = static_cast<uint32_t>( objects_.size() );
|
|
pObjects = objects_.data();
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkDebugUtilsMessengerCallbackDataEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDebugUtilsMessengerCallbackDataEXT *>( this );
|
|
}
|
|
|
|
operator VkDebugUtilsMessengerCallbackDataEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDebugUtilsMessengerCallbackDataEXT *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
const void * const &,
|
|
VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT const &,
|
|
const char * const &,
|
|
int32_t const &,
|
|
const char * const &,
|
|
uint32_t const &,
|
|
const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * const &,
|
|
uint32_t const &,
|
|
const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * const &,
|
|
uint32_t const &,
|
|
const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT * const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie(
|
|
sType, pNext, flags, pMessageIdName, messageIdNumber, pMessage, queueLabelCount, pQueueLabels, cmdBufLabelCount, pCmdBufLabels, objectCount, pObjects );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
std::strong_ordering operator<=>( DebugUtilsMessengerCallbackDataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
|
|
return cmp;
|
|
if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
|
|
return cmp;
|
|
if ( auto cmp = flags <=> rhs.flags; cmp != 0 )
|
|
return cmp;
|
|
if ( pMessageIdName != rhs.pMessageIdName )
|
|
if ( auto cmp = strcmp( pMessageIdName, rhs.pMessageIdName ); cmp != 0 )
|
|
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
|
|
if ( auto cmp = messageIdNumber <=> rhs.messageIdNumber; cmp != 0 )
|
|
return cmp;
|
|
if ( pMessage != rhs.pMessage )
|
|
if ( auto cmp = strcmp( pMessage, rhs.pMessage ); cmp != 0 )
|
|
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
|
|
if ( auto cmp = queueLabelCount <=> rhs.queueLabelCount; cmp != 0 )
|
|
return cmp;
|
|
if ( auto cmp = pQueueLabels <=> rhs.pQueueLabels; cmp != 0 )
|
|
return cmp;
|
|
if ( auto cmp = cmdBufLabelCount <=> rhs.cmdBufLabelCount; cmp != 0 )
|
|
return cmp;
|
|
if ( auto cmp = pCmdBufLabels <=> rhs.pCmdBufLabels; cmp != 0 )
|
|
return cmp;
|
|
if ( auto cmp = objectCount <=> rhs.objectCount; cmp != 0 )
|
|
return cmp;
|
|
if ( auto cmp = pObjects <=> rhs.pObjects; cmp != 0 )
|
|
return cmp;
|
|
|
|
return std::strong_ordering::equivalent;
|
|
}
|
|
#endif
|
|
|
|
bool operator==( DebugUtilsMessengerCallbackDataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
|
|
( ( pMessageIdName == rhs.pMessageIdName ) || ( strcmp( pMessageIdName, rhs.pMessageIdName ) == 0 ) ) &&
|
|
( messageIdNumber == rhs.messageIdNumber ) && ( ( pMessage == rhs.pMessage ) || ( strcmp( pMessage, rhs.pMessage ) == 0 ) ) &&
|
|
( queueLabelCount == rhs.queueLabelCount ) && ( pQueueLabels == rhs.pQueueLabels ) && ( cmdBufLabelCount == rhs.cmdBufLabelCount ) &&
|
|
( pCmdBufLabels == rhs.pCmdBufLabels ) && ( objectCount == rhs.objectCount ) && ( pObjects == rhs.pObjects );
|
|
}
|
|
|
|
bool operator!=( DebugUtilsMessengerCallbackDataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
|
|
  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsMessengerCallbackDataEXT;
    const void * pNext = {};
    VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags = {};
    const char * pMessageIdName = {};
    int32_t messageIdNumber = {};
    const char * pMessage = {};
    uint32_t queueLabelCount = {};
    const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pQueueLabels = {};
    uint32_t cmdBufLabelCount = {};
    const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pCmdBufLabels = {};
    uint32_t objectCount = {};
    const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT * pObjects = {};
  };

  template <>
  struct CppType<StructureType, StructureType::eDebugUtilsMessengerCallbackDataEXT>
  {
    using Type = DebugUtilsMessengerCallbackDataEXT;
  };
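
  // A sketch of the callback that receives DebugUtilsMessengerCallbackDataEXT from the layers;
  // the signature follows PFN_vkDebugUtilsMessengerCallbackEXT. The body shown (printing
  // pMessage) is an assumption about a typical application, not a requirement of the API.
  //
  //   VKAPI_ATTR VkBool32 VKAPI_CALL debugCallback( VkDebugUtilsMessageSeverityFlagBitsEXT,
  //                                                 VkDebugUtilsMessageTypeFlagsEXT,
  //                                                 const VkDebugUtilsMessengerCallbackDataEXT * pData,
  //                                                 void * /*pUserData*/ )
  //   {
  //     std::cerr << pData->pMessage << '\n';
  //     return VK_FALSE;
  //   }
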
struct DebugUtilsMessengerCreateInfoEXT
|
|
{
|
|
using NativeType = VkDebugUtilsMessengerCreateInfoEXT;
|
|
|
|
static const bool allowDuplicate = true;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsMessengerCreateInfoEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DebugUtilsMessengerCreateInfoEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT flags_ = {},
|
|
VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagsEXT messageSeverity_ = {},
|
|
VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageType_ = {},
|
|
PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback_ = {},
|
|
void * pUserData_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, flags( flags_ )
|
|
, messageSeverity( messageSeverity_ )
|
|
, messageType( messageType_ )
|
|
, pfnUserCallback( pfnUserCallback_ )
|
|
, pUserData( pUserData_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR DebugUtilsMessengerCreateInfoEXT( DebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DebugUtilsMessengerCreateInfoEXT( VkDebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DebugUtilsMessengerCreateInfoEXT( *reinterpret_cast<DebugUtilsMessengerCreateInfoEXT const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
DebugUtilsMessengerCreateInfoEXT & operator=( DebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DebugUtilsMessengerCreateInfoEXT & operator=( VkDebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCreateInfoEXT &
|
|
setMessageSeverity( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagsEXT messageSeverity_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
messageSeverity = messageSeverity_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCreateInfoEXT &
|
|
setMessageType( VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageType_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
messageType = messageType_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCreateInfoEXT & setPfnUserCallback( PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pfnUserCallback = pfnUserCallback_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCreateInfoEXT & setPUserData( void * pUserData_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pUserData = pUserData_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkDebugUtilsMessengerCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT *>( this );
|
|
}
|
|
|
|
operator VkDebugUtilsMessengerCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDebugUtilsMessengerCreateInfoEXT *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
const void * const &,
|
|
VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT const &,
|
|
VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagsEXT const &,
|
|
VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT const &,
|
|
PFN_vkDebugUtilsMessengerCallbackEXT const &,
|
|
void * const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags, messageSeverity, messageType, pfnUserCallback, pUserData );
|
|
}
|
|
#endif
|
|
|
|
bool operator==( DebugUtilsMessengerCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( messageSeverity == rhs.messageSeverity ) &&
|
|
( messageType == rhs.messageType ) && ( pfnUserCallback == rhs.pfnUserCallback ) && ( pUserData == rhs.pUserData );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( DebugUtilsMessengerCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
|
|
  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsMessengerCreateInfoEXT;
    const void * pNext = {};
    VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT flags = {};
    VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagsEXT messageSeverity = {};
    VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageType = {};
    PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback = {};
    void * pUserData = {};
  };

  template <>
  struct CppType<StructureType, StructureType::eDebugUtilsMessengerCreateInfoEXT>
  {
    using Type = DebugUtilsMessengerCreateInfoEXT;
  };
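
  // A minimal sketch wiring DebugUtilsMessengerCreateInfoEXT to Instance::createDebugUtilsMessengerEXT.
  // `instance` is assumed to be a vk::Instance created with VK_EXT_debug_utils enabled, and
  // `debugCallback` a function with the PFN_vkDebugUtilsMessengerCallbackEXT signature (see above).
  //
  //   vk::DebugUtilsMessengerCreateInfoEXT createInfo(
  //     {},
  //     vk::DebugUtilsMessageSeverityFlagBitsEXT::eWarning | vk::DebugUtilsMessageSeverityFlagBitsEXT::eError,
  //     vk::DebugUtilsMessageTypeFlagBitsEXT::eGeneral | vk::DebugUtilsMessageTypeFlagBitsEXT::eValidation,
  //     &debugCallback );
  //   vk::DebugUtilsMessengerEXT messenger = instance.createDebugUtilsMessengerEXT( createInfo );
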
struct DebugUtilsObjectTagInfoEXT
|
|
{
|
|
using NativeType = VkDebugUtilsObjectTagInfoEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsObjectTagInfoEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DebugUtilsObjectTagInfoEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_ = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown,
|
|
uint64_t objectHandle_ = {},
|
|
uint64_t tagName_ = {},
|
|
size_t tagSize_ = {},
|
|
const void * pTag_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, objectType( objectType_ )
|
|
, objectHandle( objectHandle_ )
|
|
, tagName( tagName_ )
|
|
, tagSize( tagSize_ )
|
|
, pTag( pTag_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR DebugUtilsObjectTagInfoEXT( DebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DebugUtilsObjectTagInfoEXT( VkDebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DebugUtilsObjectTagInfoEXT( *reinterpret_cast<DebugUtilsObjectTagInfoEXT const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
template <typename T>
|
|
DebugUtilsObjectTagInfoEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_,
|
|
uint64_t objectHandle_,
|
|
uint64_t tagName_,
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & tag_,
|
|
const void * pNext_ = nullptr )
|
|
: pNext( pNext_ )
|
|
, objectType( objectType_ )
|
|
, objectHandle( objectHandle_ )
|
|
, tagName( tagName_ )
|
|
, tagSize( tag_.size() * sizeof( T ) )
|
|
, pTag( tag_.data() )
|
|
{
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
DebugUtilsObjectTagInfoEXT & operator=( DebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DebugUtilsObjectTagInfoEXT & operator=( VkDebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectTagInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectTagInfoEXT & setObjectType( VULKAN_HPP_NAMESPACE::ObjectType objectType_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
objectType = objectType_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectTagInfoEXT & setObjectHandle( uint64_t objectHandle_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
objectHandle = objectHandle_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectTagInfoEXT & setTagName( uint64_t tagName_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
tagName = tagName_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectTagInfoEXT & setTagSize( size_t tagSize_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
tagSize = tagSize_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectTagInfoEXT & setPTag( const void * pTag_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pTag = pTag_;
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
template <typename T>
|
|
DebugUtilsObjectTagInfoEXT & setTag( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & tag_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
tagSize = tag_.size() * sizeof( T );
|
|
pTag = tag_.data();
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkDebugUtilsObjectTagInfoEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDebugUtilsObjectTagInfoEXT *>( this );
|
|
}
|
|
|
|
operator VkDebugUtilsObjectTagInfoEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDebugUtilsObjectTagInfoEXT *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
const void * const &,
|
|
VULKAN_HPP_NAMESPACE::ObjectType const &,
|
|
uint64_t const &,
|
|
uint64_t const &,
|
|
size_t const &,
|
|
const void * const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, objectType, objectHandle, tagName, tagSize, pTag );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( DebugUtilsObjectTagInfoEXT const & ) const = default;
|
|
#else
|
|
bool operator==( DebugUtilsObjectTagInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( objectType == rhs.objectType ) && ( objectHandle == rhs.objectHandle ) &&
|
|
( tagName == rhs.tagName ) && ( tagSize == rhs.tagSize ) && ( pTag == rhs.pTag );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( DebugUtilsObjectTagInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsObjectTagInfoEXT;
    const void * pNext = {};
    VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown;
    uint64_t objectHandle = {};
    uint64_t tagName = {};
    size_t tagSize = {};
    const void * pTag = {};
  };

  template <>
  struct CppType<StructureType, StructureType::eDebugUtilsObjectTagInfoEXT>
  {
    using Type = DebugUtilsObjectTagInfoEXT;
  };
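
  // A minimal sketch for DebugUtilsObjectTagInfoEXT: a tag attaches an arbitrary binary payload
  // (rather than a name) to a handle. `device` and `image` are assumed valid; the tag name 0 and
  // the 4-byte payload are purely illustrative.
  //
  //   std::array<uint8_t, 4> payload = { 1, 2, 3, 4 };
  //   device.setDebugUtilsObjectTagEXT( vk::DebugUtilsObjectTagInfoEXT(
  //     vk::ObjectType::eImage, uint64_t( static_cast<VkImage>( image ) ), 0, payload.size(), payload.data() ) );
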
struct MemoryBarrier2
{
|
|
using NativeType = VkMemoryBarrier2;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryBarrier2;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR MemoryBarrier2( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask_ = {},
|
|
VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask_ = {},
|
|
VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask_ = {},
|
|
VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, srcStageMask( srcStageMask_ )
|
|
, srcAccessMask( srcAccessMask_ )
|
|
, dstStageMask( dstStageMask_ )
|
|
, dstAccessMask( dstAccessMask_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR MemoryBarrier2( MemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
MemoryBarrier2( VkMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryBarrier2( *reinterpret_cast<MemoryBarrier2 const *>( &rhs ) ) {}
|
|
|
|
MemoryBarrier2 & operator=( MemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
MemoryBarrier2 & operator=( VkMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryBarrier2 const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 MemoryBarrier2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 MemoryBarrier2 & setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
srcStageMask = srcStageMask_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 MemoryBarrier2 & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
srcAccessMask = srcAccessMask_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 MemoryBarrier2 & setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dstStageMask = dstStageMask_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 MemoryBarrier2 & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dstAccessMask = dstAccessMask_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkMemoryBarrier2 const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkMemoryBarrier2 *>( this );
|
|
}
|
|
|
|
operator VkMemoryBarrier2 &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkMemoryBarrier2 *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
const void * const &,
|
|
VULKAN_HPP_NAMESPACE::PipelineStageFlags2 const &,
|
|
VULKAN_HPP_NAMESPACE::AccessFlags2 const &,
|
|
VULKAN_HPP_NAMESPACE::PipelineStageFlags2 const &,
|
|
VULKAN_HPP_NAMESPACE::AccessFlags2 const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, srcStageMask, srcAccessMask, dstStageMask, dstAccessMask );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( MemoryBarrier2 const & ) const = default;
|
|
#else
|
|
bool operator==( MemoryBarrier2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcStageMask == rhs.srcStageMask ) && ( srcAccessMask == rhs.srcAccessMask ) &&
|
|
( dstStageMask == rhs.dstStageMask ) && ( dstAccessMask == rhs.dstAccessMask );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( MemoryBarrier2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryBarrier2;
    const void * pNext = {};
    VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask = {};
    VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask = {};
    VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask = {};
    VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask = {};
  };

  template <>
  struct CppType<StructureType, StructureType::eMemoryBarrier2>
  {
    using Type = MemoryBarrier2;
  };

  using MemoryBarrier2KHR = MemoryBarrier2;
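
  // A minimal sketch for MemoryBarrier2 with synchronization2: a global barrier between a compute
  // write and a later transfer read. `cmd` is an assumed vk::CommandBuffer recorded on a device
  // with synchronization2 enabled; the stage/access pairing is one reasonable choice, not the only one.
  //
  //   vk::MemoryBarrier2 barrier( vk::PipelineStageFlagBits2::eComputeShader, vk::AccessFlagBits2::eShaderWrite,
  //                               vk::PipelineStageFlagBits2::eTransfer, vk::AccessFlagBits2::eTransferRead );
  //   cmd.pipelineBarrier2( vk::DependencyInfo( {}, barrier ) );
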
struct ImageSubresourceRange
|
|
{
|
|
using NativeType = VkImageSubresourceRange;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ImageSubresourceRange( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {},
|
|
uint32_t baseMipLevel_ = {},
|
|
uint32_t levelCount_ = {},
|
|
uint32_t baseArrayLayer_ = {},
|
|
uint32_t layerCount_ = {} ) VULKAN_HPP_NOEXCEPT
|
|
: aspectMask( aspectMask_ )
|
|
, baseMipLevel( baseMipLevel_ )
|
|
, levelCount( levelCount_ )
|
|
, baseArrayLayer( baseArrayLayer_ )
|
|
, layerCount( layerCount_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR ImageSubresourceRange( ImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImageSubresourceRange( VkImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ImageSubresourceRange( *reinterpret_cast<ImageSubresourceRange const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
ImageSubresourceRange & operator=( ImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ImageSubresourceRange & operator=( VkImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageSubresourceRange const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ImageSubresourceRange & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
aspectMask = aspectMask_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageSubresourceRange & setBaseMipLevel( uint32_t baseMipLevel_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
baseMipLevel = baseMipLevel_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageSubresourceRange & setLevelCount( uint32_t levelCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
levelCount = levelCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageSubresourceRange & setBaseArrayLayer( uint32_t baseArrayLayer_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
baseArrayLayer = baseArrayLayer_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageSubresourceRange & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
layerCount = layerCount_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkImageSubresourceRange const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkImageSubresourceRange *>( this );
|
|
}
|
|
|
|
operator VkImageSubresourceRange &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkImageSubresourceRange *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::ImageAspectFlags const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( aspectMask, baseMipLevel, levelCount, baseArrayLayer, layerCount );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( ImageSubresourceRange const & ) const = default;
|
|
#else
|
|
bool operator==( ImageSubresourceRange const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( aspectMask == rhs.aspectMask ) && ( baseMipLevel == rhs.baseMipLevel ) && ( levelCount == rhs.levelCount ) &&
|
|
( baseArrayLayer == rhs.baseArrayLayer ) && ( layerCount == rhs.layerCount );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( ImageSubresourceRange const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
  public:
    VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {};
    uint32_t baseMipLevel = {};
    uint32_t levelCount = {};
    uint32_t baseArrayLayer = {};
    uint32_t layerCount = {};
  };
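
  // A minimal sketch for ImageSubresourceRange: selecting every mip level and array layer of a
  // color image, e.g. for an image view or a whole-image barrier.
  //
  //   vk::ImageSubresourceRange fullRange( vk::ImageAspectFlagBits::eColor,
  //                                        0, VK_REMAINING_MIP_LEVELS,
  //                                        0, VK_REMAINING_ARRAY_LAYERS );
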
struct ImageMemoryBarrier2
{
|
|
using NativeType = VkImageMemoryBarrier2;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageMemoryBarrier2;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ImageMemoryBarrier2( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask_ = {},
|
|
VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask_ = {},
|
|
VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask_ = {},
|
|
VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask_ = {},
|
|
VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
|
|
VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
|
|
uint32_t srcQueueFamilyIndex_ = {},
|
|
uint32_t dstQueueFamilyIndex_ = {},
|
|
VULKAN_HPP_NAMESPACE::Image image_ = {},
|
|
VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, srcStageMask( srcStageMask_ )
|
|
, srcAccessMask( srcAccessMask_ )
|
|
, dstStageMask( dstStageMask_ )
|
|
, dstAccessMask( dstAccessMask_ )
|
|
, oldLayout( oldLayout_ )
|
|
, newLayout( newLayout_ )
|
|
, srcQueueFamilyIndex( srcQueueFamilyIndex_ )
|
|
, dstQueueFamilyIndex( dstQueueFamilyIndex_ )
|
|
, image( image_ )
|
|
, subresourceRange( subresourceRange_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR ImageMemoryBarrier2( ImageMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImageMemoryBarrier2( VkImageMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT : ImageMemoryBarrier2( *reinterpret_cast<ImageMemoryBarrier2 const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
ImageMemoryBarrier2 & operator=( ImageMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ImageMemoryBarrier2 & operator=( VkImageMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2 const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
srcStageMask = srcStageMask_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
srcAccessMask = srcAccessMask_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dstStageMask = dstStageMask_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dstAccessMask = dstAccessMask_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setOldLayout( VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
oldLayout = oldLayout_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setNewLayout( VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
newLayout = newLayout_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
srcQueueFamilyIndex = srcQueueFamilyIndex_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dstQueueFamilyIndex = dstQueueFamilyIndex_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
image = image_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 &
|
|
setSubresourceRange( VULKAN_HPP_NAMESPACE::ImageSubresourceRange const & subresourceRange_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
subresourceRange = subresourceRange_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkImageMemoryBarrier2 const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkImageMemoryBarrier2 *>( this );
|
|
}
|
|
|
|
operator VkImageMemoryBarrier2 &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkImageMemoryBarrier2 *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
const void * const &,
|
|
VULKAN_HPP_NAMESPACE::PipelineStageFlags2 const &,
|
|
VULKAN_HPP_NAMESPACE::AccessFlags2 const &,
|
|
VULKAN_HPP_NAMESPACE::PipelineStageFlags2 const &,
|
|
VULKAN_HPP_NAMESPACE::AccessFlags2 const &,
|
|
VULKAN_HPP_NAMESPACE::ImageLayout const &,
|
|
VULKAN_HPP_NAMESPACE::ImageLayout const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
VULKAN_HPP_NAMESPACE::Image const &,
|
|
VULKAN_HPP_NAMESPACE::ImageSubresourceRange const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType,
|
|
pNext,
|
|
srcStageMask,
|
|
srcAccessMask,
|
|
dstStageMask,
|
|
dstAccessMask,
|
|
oldLayout,
|
|
newLayout,
|
|
srcQueueFamilyIndex,
|
|
dstQueueFamilyIndex,
|
|
image,
|
|
subresourceRange );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( ImageMemoryBarrier2 const & ) const = default;
|
|
#else
|
|
bool operator==( ImageMemoryBarrier2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcStageMask == rhs.srcStageMask ) && ( srcAccessMask == rhs.srcAccessMask ) &&
|
|
( dstStageMask == rhs.dstStageMask ) && ( dstAccessMask == rhs.dstAccessMask ) && ( oldLayout == rhs.oldLayout ) &&
|
|
( newLayout == rhs.newLayout ) && ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex ) && ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex ) &&
|
|
( image == rhs.image ) && ( subresourceRange == rhs.subresourceRange );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( ImageMemoryBarrier2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageMemoryBarrier2;
    const void * pNext = {};
    VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask = {};
    VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask = {};
    VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask = {};
    VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask = {};
    VULKAN_HPP_NAMESPACE::ImageLayout oldLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
    VULKAN_HPP_NAMESPACE::ImageLayout newLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
    uint32_t srcQueueFamilyIndex = {};
    uint32_t dstQueueFamilyIndex = {};
    VULKAN_HPP_NAMESPACE::Image image = {};
    VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange = {};
  };

  template <>
  struct CppType<StructureType, StructureType::eImageMemoryBarrier2>
  {
    using Type = ImageMemoryBarrier2;
  };

  using ImageMemoryBarrier2KHR = ImageMemoryBarrier2;
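
  // A minimal sketch for ImageMemoryBarrier2: transitioning a color image from undefined to
  // color-attachment layout. `cmd` and `image` are assumed valid; the stage/access choices are
  // one common combination, not the only correct one.
  //
  //   vk::ImageMemoryBarrier2 imageBarrier(
  //     vk::PipelineStageFlagBits2::eTopOfPipe, vk::AccessFlagBits2::eNone,
  //     vk::PipelineStageFlagBits2::eColorAttachmentOutput, vk::AccessFlagBits2::eColorAttachmentWrite,
  //     vk::ImageLayout::eUndefined, vk::ImageLayout::eColorAttachmentOptimal,
  //     VK_QUEUE_FAMILY_IGNORED, VK_QUEUE_FAMILY_IGNORED, image,
  //     vk::ImageSubresourceRange( vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 ) );
  //   cmd.pipelineBarrier2( vk::DependencyInfo( {}, {}, {}, imageBarrier ) );
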
struct DependencyInfo
{
|
|
using NativeType = VkDependencyInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDependencyInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DependencyInfo( VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ = {},
|
|
uint32_t memoryBarrierCount_ = {},
|
|
const VULKAN_HPP_NAMESPACE::MemoryBarrier2 * pMemoryBarriers_ = {},
|
|
uint32_t bufferMemoryBarrierCount_ = {},
|
|
const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2 * pBufferMemoryBarriers_ = {},
|
|
uint32_t imageMemoryBarrierCount_ = {},
|
|
const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2 * pImageMemoryBarriers_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, dependencyFlags( dependencyFlags_ )
|
|
, memoryBarrierCount( memoryBarrierCount_ )
|
|
, pMemoryBarriers( pMemoryBarriers_ )
|
|
, bufferMemoryBarrierCount( bufferMemoryBarrierCount_ )
|
|
, pBufferMemoryBarriers( pBufferMemoryBarriers_ )
|
|
, imageMemoryBarrierCount( imageMemoryBarrierCount_ )
|
|
, pImageMemoryBarriers( pImageMemoryBarriers_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR DependencyInfo( DependencyInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DependencyInfo( VkDependencyInfo const & rhs ) VULKAN_HPP_NOEXCEPT : DependencyInfo( *reinterpret_cast<DependencyInfo const *>( &rhs ) ) {}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
DependencyInfo( VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_,
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::MemoryBarrier2> const & memoryBarriers_,
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2> const & bufferMemoryBarriers_ = {},
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2> const & imageMemoryBarriers_ = {},
|
|
const void * pNext_ = nullptr )
|
|
: pNext( pNext_ )
|
|
, dependencyFlags( dependencyFlags_ )
|
|
, memoryBarrierCount( static_cast<uint32_t>( memoryBarriers_.size() ) )
|
|
, pMemoryBarriers( memoryBarriers_.data() )
|
|
, bufferMemoryBarrierCount( static_cast<uint32_t>( bufferMemoryBarriers_.size() ) )
|
|
, pBufferMemoryBarriers( bufferMemoryBarriers_.data() )
|
|
, imageMemoryBarrierCount( static_cast<uint32_t>( imageMemoryBarriers_.size() ) )
|
|
, pImageMemoryBarriers( imageMemoryBarriers_.data() )
|
|
{
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
DependencyInfo & operator=( DependencyInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DependencyInfo & operator=( VkDependencyInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DependencyInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 DependencyInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DependencyInfo & setDependencyFlags( VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ ) VULKAN_HPP_NOEXCEPT
    {
      dependencyFlags = dependencyFlags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DependencyInfo & setMemoryBarrierCount( uint32_t memoryBarrierCount_ ) VULKAN_HPP_NOEXCEPT
    {
      memoryBarrierCount = memoryBarrierCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DependencyInfo & setPMemoryBarriers( const VULKAN_HPP_NAMESPACE::MemoryBarrier2 * pMemoryBarriers_ ) VULKAN_HPP_NOEXCEPT
    {
      pMemoryBarriers = pMemoryBarriers_;
      return *this;
    }

# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    DependencyInfo &
      setMemoryBarriers( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::MemoryBarrier2> const & memoryBarriers_ ) VULKAN_HPP_NOEXCEPT
    {
      memoryBarrierCount = static_cast<uint32_t>( memoryBarriers_.size() );
      pMemoryBarriers    = memoryBarriers_.data();
      return *this;
    }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 DependencyInfo & setBufferMemoryBarrierCount( uint32_t bufferMemoryBarrierCount_ ) VULKAN_HPP_NOEXCEPT
    {
      bufferMemoryBarrierCount = bufferMemoryBarrierCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DependencyInfo &
      setPBufferMemoryBarriers( const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2 * pBufferMemoryBarriers_ ) VULKAN_HPP_NOEXCEPT
    {
      pBufferMemoryBarriers = pBufferMemoryBarriers_;
      return *this;
    }

# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    DependencyInfo & setBufferMemoryBarriers(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2> const & bufferMemoryBarriers_ ) VULKAN_HPP_NOEXCEPT
    {
      bufferMemoryBarrierCount = static_cast<uint32_t>( bufferMemoryBarriers_.size() );
      pBufferMemoryBarriers    = bufferMemoryBarriers_.data();
      return *this;
    }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 DependencyInfo & setImageMemoryBarrierCount( uint32_t imageMemoryBarrierCount_ ) VULKAN_HPP_NOEXCEPT
    {
      imageMemoryBarrierCount = imageMemoryBarrierCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DependencyInfo &
      setPImageMemoryBarriers( const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2 * pImageMemoryBarriers_ ) VULKAN_HPP_NOEXCEPT
    {
      pImageMemoryBarriers = pImageMemoryBarriers_;
      return *this;
    }

# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    DependencyInfo & setImageMemoryBarriers(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2> const & imageMemoryBarriers_ ) VULKAN_HPP_NOEXCEPT
    {
      imageMemoryBarrierCount = static_cast<uint32_t>( imageMemoryBarriers_.size() );
      pImageMemoryBarriers    = imageMemoryBarriers_.data();
      return *this;
    }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkDependencyInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDependencyInfo *>( this );
    }

    operator VkDependencyInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDependencyInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
               const void * const &,
               VULKAN_HPP_NAMESPACE::DependencyFlags const &,
               uint32_t const &,
               const VULKAN_HPP_NAMESPACE::MemoryBarrier2 * const &,
               uint32_t const &,
               const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2 * const &,
               uint32_t const &,
               const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2 * const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       dependencyFlags,
                       memoryBarrierCount,
                       pMemoryBarriers,
                       bufferMemoryBarrierCount,
                       pBufferMemoryBarriers,
                       imageMemoryBarrierCount,
                       pImageMemoryBarriers );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DependencyInfo const & ) const = default;
#else
    bool operator==( DependencyInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
# if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
# else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dependencyFlags == rhs.dependencyFlags ) &&
             ( memoryBarrierCount == rhs.memoryBarrierCount ) && ( pMemoryBarriers == rhs.pMemoryBarriers ) &&
             ( bufferMemoryBarrierCount == rhs.bufferMemoryBarrierCount ) && ( pBufferMemoryBarriers == rhs.pBufferMemoryBarriers ) &&
             ( imageMemoryBarrierCount == rhs.imageMemoryBarrierCount ) && ( pImageMemoryBarriers == rhs.pImageMemoryBarriers );
# endif
    }

    bool operator!=( DependencyInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDependencyInfo;
    const void * pNext = {};
    VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags = {};
    uint32_t memoryBarrierCount = {};
    const VULKAN_HPP_NAMESPACE::MemoryBarrier2 * pMemoryBarriers = {};
    uint32_t bufferMemoryBarrierCount = {};
    const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2 * pBufferMemoryBarriers = {};
    uint32_t imageMemoryBarrierCount = {};
    const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2 * pImageMemoryBarriers = {};
  };

  template <>
  struct CppType<StructureType, StructureType::eDependencyInfo>
  {
    using Type = DependencyInfo;
  };

  using DependencyInfoKHR = DependencyInfo;

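  // Illustrative usage sketch (editorial comment, not part of the generated registry output):
  // a DependencyInfo is typically filled through the chained setters above and then passed to
  // CommandBuffer::pipelineBarrier2. The stage/access masks and the commandBuffer handle below
  // are hypothetical placeholders.
  //
  //   MemoryBarrier2 memoryBarrier( PipelineStageFlagBits2::eTransfer,
  //                                 AccessFlagBits2::eTransferWrite,
  //                                 PipelineStageFlagBits2::eFragmentShader,
  //                                 AccessFlagBits2::eShaderRead );
  //   DependencyInfo dependencyInfo = DependencyInfo{}.setMemoryBarriers( memoryBarrier );
  //   commandBuffer.pipelineBarrier2( dependencyInfo );
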
struct DescriptorBufferInfo
  {
    using NativeType = VkDescriptorBufferInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR DescriptorBufferInfo( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {},
                                               VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {},
                                               VULKAN_HPP_NAMESPACE::DeviceSize range_ = {} ) VULKAN_HPP_NOEXCEPT
      : buffer( buffer_ )
      , offset( offset_ )
      , range( range_ )
    {
    }

    VULKAN_HPP_CONSTEXPR DescriptorBufferInfo( DescriptorBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DescriptorBufferInfo( VkDescriptorBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : DescriptorBufferInfo( *reinterpret_cast<DescriptorBufferInfo const *>( &rhs ) )
    {
    }

    DescriptorBufferInfo & operator=( DescriptorBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    DescriptorBufferInfo & operator=( VkDescriptorBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorBufferInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 DescriptorBufferInfo & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
    {
      buffer = buffer_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DescriptorBufferInfo & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
    {
      offset = offset_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DescriptorBufferInfo & setRange( VULKAN_HPP_NAMESPACE::DeviceSize range_ ) VULKAN_HPP_NOEXCEPT
    {
      range = range_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkDescriptorBufferInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDescriptorBufferInfo *>( this );
    }

    operator VkDescriptorBufferInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDescriptorBufferInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::Buffer const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( buffer, offset, range );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DescriptorBufferInfo const & ) const = default;
#else
    bool operator==( DescriptorBufferInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
# if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
# else
      return ( buffer == rhs.buffer ) && ( offset == rhs.offset ) && ( range == rhs.range );
# endif
    }

    bool operator!=( DescriptorBufferInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::Buffer buffer = {};
    VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
    VULKAN_HPP_NAMESPACE::DeviceSize range = {};
  };

struct DescriptorImageInfo
  {
    using NativeType = VkDescriptorImageInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      DescriptorImageInfo( VULKAN_HPP_NAMESPACE::Sampler sampler_ = {},
                           VULKAN_HPP_NAMESPACE::ImageView imageView_ = {},
                           VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT
      : sampler( sampler_ )
      , imageView( imageView_ )
      , imageLayout( imageLayout_ )
    {
    }

    VULKAN_HPP_CONSTEXPR DescriptorImageInfo( DescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DescriptorImageInfo( VkDescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT : DescriptorImageInfo( *reinterpret_cast<DescriptorImageInfo const *>( &rhs ) )
    {
    }

    DescriptorImageInfo & operator=( DescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    DescriptorImageInfo & operator=( VkDescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorImageInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 DescriptorImageInfo & setSampler( VULKAN_HPP_NAMESPACE::Sampler sampler_ ) VULKAN_HPP_NOEXCEPT
    {
      sampler = sampler_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DescriptorImageInfo & setImageView( VULKAN_HPP_NAMESPACE::ImageView imageView_ ) VULKAN_HPP_NOEXCEPT
    {
      imageView = imageView_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DescriptorImageInfo & setImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ ) VULKAN_HPP_NOEXCEPT
    {
      imageLayout = imageLayout_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkDescriptorImageInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDescriptorImageInfo *>( this );
    }

    operator VkDescriptorImageInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDescriptorImageInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::Sampler const &, VULKAN_HPP_NAMESPACE::ImageView const &, VULKAN_HPP_NAMESPACE::ImageLayout const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sampler, imageView, imageLayout );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DescriptorImageInfo const & ) const = default;
#else
    bool operator==( DescriptorImageInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
# if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
# else
      return ( sampler == rhs.sampler ) && ( imageView == rhs.imageView ) && ( imageLayout == rhs.imageLayout );
# endif
    }

    bool operator!=( DescriptorImageInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::Sampler sampler = {};
    VULKAN_HPP_NAMESPACE::ImageView imageView = {};
    VULKAN_HPP_NAMESPACE::ImageLayout imageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
  };

struct DescriptorPoolSize
  {
    using NativeType = VkDescriptorPoolSize;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR DescriptorPoolSize( VULKAN_HPP_NAMESPACE::DescriptorType type_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler,
                                             uint32_t descriptorCount_ = {} ) VULKAN_HPP_NOEXCEPT
      : type( type_ )
      , descriptorCount( descriptorCount_ )
    {
    }

    VULKAN_HPP_CONSTEXPR DescriptorPoolSize( DescriptorPoolSize const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DescriptorPoolSize( VkDescriptorPoolSize const & rhs ) VULKAN_HPP_NOEXCEPT : DescriptorPoolSize( *reinterpret_cast<DescriptorPoolSize const *>( &rhs ) ) {}

    DescriptorPoolSize & operator=( DescriptorPoolSize const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    DescriptorPoolSize & operator=( VkDescriptorPoolSize const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorPoolSize const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 DescriptorPoolSize & setType( VULKAN_HPP_NAMESPACE::DescriptorType type_ ) VULKAN_HPP_NOEXCEPT
    {
      type = type_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DescriptorPoolSize & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorCount = descriptorCount_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkDescriptorPoolSize const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDescriptorPoolSize *>( this );
    }

    operator VkDescriptorPoolSize &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDescriptorPoolSize *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::DescriptorType const &, uint32_t const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( type, descriptorCount );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DescriptorPoolSize const & ) const = default;
#else
    bool operator==( DescriptorPoolSize const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
# if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
# else
      return ( type == rhs.type ) && ( descriptorCount == rhs.descriptorCount );
# endif
    }

    bool operator!=( DescriptorPoolSize const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::DescriptorType type = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler;
    uint32_t descriptorCount = {};
  };

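  // Illustrative usage sketch (editorial comment, not part of the generated registry output):
  // DescriptorPoolSize entries state how many descriptors of each type a pool may hold and are
  // consumed via the pPoolSizes / setPoolSizes members of DescriptorPoolCreateInfo below. The
  // descriptor counts, maxSets value, and the device handle are hypothetical.
  //
  //   std::array<DescriptorPoolSize, 2> poolSizes = { DescriptorPoolSize( DescriptorType::eUniformBuffer, 16 ),
  //                                                   DescriptorPoolSize( DescriptorType::eCombinedImageSampler, 16 ) };
  //   DescriptorPoolCreateInfo poolCreateInfo( {}, 8, poolSizes );
  //   DescriptorPool descriptorPool = device.createDescriptorPool( poolCreateInfo );
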
struct DescriptorPoolCreateInfo
|
|
{
|
|
using NativeType = VkDescriptorPoolCreateInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorPoolCreateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DescriptorPoolCreateInfo( VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags flags_ = {},
|
|
uint32_t maxSets_ = {},
|
|
uint32_t poolSizeCount_ = {},
|
|
const VULKAN_HPP_NAMESPACE::DescriptorPoolSize * pPoolSizes_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, flags( flags_ )
|
|
, maxSets( maxSets_ )
|
|
, poolSizeCount( poolSizeCount_ )
|
|
, pPoolSizes( pPoolSizes_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR DescriptorPoolCreateInfo( DescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DescriptorPoolCreateInfo( VkDescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DescriptorPoolCreateInfo( *reinterpret_cast<DescriptorPoolCreateInfo const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
DescriptorPoolCreateInfo( VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags flags_,
|
|
uint32_t maxSets_,
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorPoolSize> const & poolSizes_,
|
|
const void * pNext_ = nullptr )
|
|
: pNext( pNext_ ), flags( flags_ ), maxSets( maxSets_ ), poolSizeCount( static_cast<uint32_t>( poolSizes_.size() ) ), pPoolSizes( poolSizes_.data() )
|
|
{
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
DescriptorPoolCreateInfo & operator=( DescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DescriptorPoolCreateInfo & operator=( VkDescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 DescriptorPoolCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DescriptorPoolCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DescriptorPoolCreateInfo & setMaxSets( uint32_t maxSets_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
maxSets = maxSets_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DescriptorPoolCreateInfo & setPoolSizeCount( uint32_t poolSizeCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
poolSizeCount = poolSizeCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DescriptorPoolCreateInfo & setPPoolSizes( const VULKAN_HPP_NAMESPACE::DescriptorPoolSize * pPoolSizes_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pPoolSizes = pPoolSizes_;
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
DescriptorPoolCreateInfo &
|
|
setPoolSizes( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorPoolSize> const & poolSizes_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
poolSizeCount = static_cast<uint32_t>( poolSizes_.size() );
|
|
pPoolSizes = poolSizes_.data();
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkDescriptorPoolCreateInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDescriptorPoolCreateInfo *>( this );
|
|
}
|
|
|
|
operator VkDescriptorPoolCreateInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDescriptorPoolCreateInfo *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
const void * const &,
|
|
VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
const VULKAN_HPP_NAMESPACE::DescriptorPoolSize * const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags, maxSets, poolSizeCount, pPoolSizes );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( DescriptorPoolCreateInfo const & ) const = default;
|
|
#else
|
|
bool operator==( DescriptorPoolCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( maxSets == rhs.maxSets ) &&
|
|
( poolSizeCount == rhs.poolSizeCount ) && ( pPoolSizes == rhs.pPoolSizes );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( DescriptorPoolCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorPoolCreateInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags flags = {};
|
|
uint32_t maxSets = {};
|
|
uint32_t poolSizeCount = {};
|
|
const VULKAN_HPP_NAMESPACE::DescriptorPoolSize * pPoolSizes = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eDescriptorPoolCreateInfo>
|
|
{
|
|
using Type = DescriptorPoolCreateInfo;
|
|
};
|
|
|
|
struct DescriptorPoolInlineUniformBlockCreateInfo
|
|
{
|
|
using NativeType = VkDescriptorPoolInlineUniformBlockCreateInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorPoolInlineUniformBlockCreateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DescriptorPoolInlineUniformBlockCreateInfo( uint32_t maxInlineUniformBlockBindings_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, maxInlineUniformBlockBindings( maxInlineUniformBlockBindings_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR DescriptorPoolInlineUniformBlockCreateInfo( DescriptorPoolInlineUniformBlockCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DescriptorPoolInlineUniformBlockCreateInfo( VkDescriptorPoolInlineUniformBlockCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DescriptorPoolInlineUniformBlockCreateInfo( *reinterpret_cast<DescriptorPoolInlineUniformBlockCreateInfo const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
DescriptorPoolInlineUniformBlockCreateInfo & operator=( DescriptorPoolInlineUniformBlockCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DescriptorPoolInlineUniformBlockCreateInfo & operator=( VkDescriptorPoolInlineUniformBlockCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorPoolInlineUniformBlockCreateInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 DescriptorPoolInlineUniformBlockCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DescriptorPoolInlineUniformBlockCreateInfo &
|
|
setMaxInlineUniformBlockBindings( uint32_t maxInlineUniformBlockBindings_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
maxInlineUniformBlockBindings = maxInlineUniformBlockBindings_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkDescriptorPoolInlineUniformBlockCreateInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDescriptorPoolInlineUniformBlockCreateInfo *>( this );
|
|
}
|
|
|
|
operator VkDescriptorPoolInlineUniformBlockCreateInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDescriptorPoolInlineUniformBlockCreateInfo *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, maxInlineUniformBlockBindings );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( DescriptorPoolInlineUniformBlockCreateInfo const & ) const = default;
|
|
#else
|
|
bool operator==( DescriptorPoolInlineUniformBlockCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxInlineUniformBlockBindings == rhs.maxInlineUniformBlockBindings );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( DescriptorPoolInlineUniformBlockCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorPoolInlineUniformBlockCreateInfo;
|
|
const void * pNext = {};
|
|
uint32_t maxInlineUniformBlockBindings = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eDescriptorPoolInlineUniformBlockCreateInfo>
|
|
{
|
|
using Type = DescriptorPoolInlineUniformBlockCreateInfo;
|
|
};
|
|
|
|
using DescriptorPoolInlineUniformBlockCreateInfoEXT = DescriptorPoolInlineUniformBlockCreateInfo;
|
|
|
|
struct DescriptorSetAllocateInfo
|
|
{
|
|
using NativeType = VkDescriptorSetAllocateInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetAllocateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DescriptorSetAllocateInfo( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool_ = {},
|
|
uint32_t descriptorSetCount_ = {},
|
|
const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, descriptorPool( descriptorPool_ )
|
|
, descriptorSetCount( descriptorSetCount_ )
|
|
, pSetLayouts( pSetLayouts_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR DescriptorSetAllocateInfo( DescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DescriptorSetAllocateInfo( VkDescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DescriptorSetAllocateInfo( *reinterpret_cast<DescriptorSetAllocateInfo const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
DescriptorSetAllocateInfo( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool_,
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorSetLayout> const & setLayouts_,
|
|
const void * pNext_ = nullptr )
|
|
: pNext( pNext_ ), descriptorPool( descriptorPool_ ), descriptorSetCount( static_cast<uint32_t>( setLayouts_.size() ) ), pSetLayouts( setLayouts_.data() )
|
|
{
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
DescriptorSetAllocateInfo & operator=( DescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DescriptorSetAllocateInfo & operator=( VkDescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 DescriptorSetAllocateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DescriptorSetAllocateInfo & setDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
descriptorPool = descriptorPool_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DescriptorSetAllocateInfo & setDescriptorSetCount( uint32_t descriptorSetCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
descriptorSetCount = descriptorSetCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DescriptorSetAllocateInfo & setPSetLayouts( const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pSetLayouts = pSetLayouts_;
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
DescriptorSetAllocateInfo &
|
|
setSetLayouts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorSetLayout> const & setLayouts_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
descriptorSetCount = static_cast<uint32_t>( setLayouts_.size() );
|
|
pSetLayouts = setLayouts_.data();
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkDescriptorSetAllocateInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDescriptorSetAllocateInfo *>( this );
|
|
}
|
|
|
|
operator VkDescriptorSetAllocateInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDescriptorSetAllocateInfo *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
const void * const &,
|
|
VULKAN_HPP_NAMESPACE::DescriptorPool const &,
|
|
uint32_t const &,
|
|
const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, descriptorPool, descriptorSetCount, pSetLayouts );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( DescriptorSetAllocateInfo const & ) const = default;
|
|
#else
|
|
bool operator==( DescriptorSetAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( descriptorPool == rhs.descriptorPool ) && ( descriptorSetCount == rhs.descriptorSetCount ) &&
|
|
( pSetLayouts == rhs.pSetLayouts );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( DescriptorSetAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetAllocateInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool = {};
|
|
uint32_t descriptorSetCount = {};
|
|
const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eDescriptorSetAllocateInfo>
|
|
{
|
|
using Type = DescriptorSetAllocateInfo;
|
|
};
|
|
|
|
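  // Illustrative usage sketch (editorial comment, not part of the generated registry output):
  // DescriptorSetAllocateInfo above pairs a pool with the layouts to allocate; with the
  // enhanced-mode constructor the descriptorSetCount is derived from the layout array. The
  // descriptorPool, descriptorSetLayout, and device handles are hypothetical.
  //
  //   DescriptorSetAllocateInfo allocateInfo( descriptorPool, descriptorSetLayout );
  //   std::vector<DescriptorSet> descriptorSets = device.allocateDescriptorSets( allocateInfo );
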
struct DescriptorSetLayoutBinding
|
|
{
|
|
using NativeType = VkDescriptorSetLayoutBinding;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBinding( uint32_t binding_ = {},
|
|
VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler,
|
|
uint32_t descriptorCount_ = {},
|
|
VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = {},
|
|
const VULKAN_HPP_NAMESPACE::Sampler * pImmutableSamplers_ = {} ) VULKAN_HPP_NOEXCEPT
|
|
: binding( binding_ )
|
|
, descriptorType( descriptorType_ )
|
|
, descriptorCount( descriptorCount_ )
|
|
, stageFlags( stageFlags_ )
|
|
, pImmutableSamplers( pImmutableSamplers_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBinding( DescriptorSetLayoutBinding const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DescriptorSetLayoutBinding( VkDescriptorSetLayoutBinding const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DescriptorSetLayoutBinding( *reinterpret_cast<DescriptorSetLayoutBinding const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
DescriptorSetLayoutBinding( uint32_t binding_,
|
|
VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_,
|
|
VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_,
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Sampler> const & immutableSamplers_ )
|
|
: binding( binding_ )
|
|
, descriptorType( descriptorType_ )
|
|
, descriptorCount( static_cast<uint32_t>( immutableSamplers_.size() ) )
|
|
, stageFlags( stageFlags_ )
|
|
, pImmutableSamplers( immutableSamplers_.data() )
|
|
{
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
DescriptorSetLayoutBinding & operator=( DescriptorSetLayoutBinding const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DescriptorSetLayoutBinding & operator=( VkDescriptorSetLayoutBinding const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBinding & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
binding = binding_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBinding & setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
descriptorType = descriptorType_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBinding & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
descriptorCount = descriptorCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBinding & setStageFlags( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
stageFlags = stageFlags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBinding & setPImmutableSamplers( const VULKAN_HPP_NAMESPACE::Sampler * pImmutableSamplers_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pImmutableSamplers = pImmutableSamplers_;
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
DescriptorSetLayoutBinding &
|
|
setImmutableSamplers( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Sampler> const & immutableSamplers_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
descriptorCount = static_cast<uint32_t>( immutableSamplers_.size() );
|
|
pImmutableSamplers = immutableSamplers_.data();
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkDescriptorSetLayoutBinding const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDescriptorSetLayoutBinding *>( this );
|
|
}
|
|
|
|
operator VkDescriptorSetLayoutBinding &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDescriptorSetLayoutBinding *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<uint32_t const &,
|
|
VULKAN_HPP_NAMESPACE::DescriptorType const &,
|
|
uint32_t const &,
|
|
VULKAN_HPP_NAMESPACE::ShaderStageFlags const &,
|
|
const VULKAN_HPP_NAMESPACE::Sampler * const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( binding, descriptorType, descriptorCount, stageFlags, pImmutableSamplers );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( DescriptorSetLayoutBinding const & ) const = default;
|
|
#else
|
|
bool operator==( DescriptorSetLayoutBinding const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( binding == rhs.binding ) && ( descriptorType == rhs.descriptorType ) && ( descriptorCount == rhs.descriptorCount ) &&
|
|
( stageFlags == rhs.stageFlags ) && ( pImmutableSamplers == rhs.pImmutableSamplers );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( DescriptorSetLayoutBinding const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
uint32_t binding = {};
|
|
VULKAN_HPP_NAMESPACE::DescriptorType descriptorType = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler;
|
|
uint32_t descriptorCount = {};
|
|
VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags = {};
|
|
const VULKAN_HPP_NAMESPACE::Sampler * pImmutableSamplers = {};
|
|
};
|
|
|
|
struct DescriptorSetLayoutBindingFlagsCreateInfo
|
|
{
|
|
using NativeType = VkDescriptorSetLayoutBindingFlagsCreateInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetLayoutBindingFlagsCreateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBindingFlagsCreateInfo( uint32_t bindingCount_ = {},
|
|
const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags * pBindingFlags_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, bindingCount( bindingCount_ )
|
|
, pBindingFlags( pBindingFlags_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBindingFlagsCreateInfo( DescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DescriptorSetLayoutBindingFlagsCreateInfo( VkDescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DescriptorSetLayoutBindingFlagsCreateInfo( *reinterpret_cast<DescriptorSetLayoutBindingFlagsCreateInfo const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
DescriptorSetLayoutBindingFlagsCreateInfo(
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags> const & bindingFlags_, const void * pNext_ = nullptr )
|
|
: pNext( pNext_ ), bindingCount( static_cast<uint32_t>( bindingFlags_.size() ) ), pBindingFlags( bindingFlags_.data() )
|
|
{
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
DescriptorSetLayoutBindingFlagsCreateInfo & operator=( DescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DescriptorSetLayoutBindingFlagsCreateInfo & operator=( VkDescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBindingFlagsCreateInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBindingFlagsCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBindingFlagsCreateInfo & setBindingCount( uint32_t bindingCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
bindingCount = bindingCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBindingFlagsCreateInfo &
|
|
setPBindingFlags( const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags * pBindingFlags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pBindingFlags = pBindingFlags_;
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
DescriptorSetLayoutBindingFlagsCreateInfo & setBindingFlags(
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags> const & bindingFlags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
bindingCount = static_cast<uint32_t>( bindingFlags_.size() );
|
|
pBindingFlags = bindingFlags_.data();
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkDescriptorSetLayoutBindingFlagsCreateInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDescriptorSetLayoutBindingFlagsCreateInfo *>( this );
|
|
}
|
|
|
|
operator VkDescriptorSetLayoutBindingFlagsCreateInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDescriptorSetLayoutBindingFlagsCreateInfo *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::
|
|
tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags * const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, bindingCount, pBindingFlags );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( DescriptorSetLayoutBindingFlagsCreateInfo const & ) const = default;
|
|
#else
|
|
bool operator==( DescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( bindingCount == rhs.bindingCount ) && ( pBindingFlags == rhs.pBindingFlags );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( DescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetLayoutBindingFlagsCreateInfo;
|
|
const void * pNext = {};
|
|
uint32_t bindingCount = {};
|
|
const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags * pBindingFlags = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eDescriptorSetLayoutBindingFlagsCreateInfo>
|
|
{
|
|
using Type = DescriptorSetLayoutBindingFlagsCreateInfo;
|
|
};
|
|
|
|
using DescriptorSetLayoutBindingFlagsCreateInfoEXT = DescriptorSetLayoutBindingFlagsCreateInfo;
|
|
|
|
struct DescriptorSetLayoutCreateInfo
|
|
{
|
|
using NativeType = VkDescriptorSetLayoutCreateInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetLayoutCreateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateInfo( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags_ = {},
|
|
uint32_t bindingCount_ = {},
|
|
const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding * pBindings_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, flags( flags_ )
|
|
, bindingCount( bindingCount_ )
|
|
, pBindings( pBindings_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateInfo( DescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DescriptorSetLayoutCreateInfo( VkDescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DescriptorSetLayoutCreateInfo( *reinterpret_cast<DescriptorSetLayoutCreateInfo const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
DescriptorSetLayoutCreateInfo( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags_,
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding> const & bindings_,
|
|
const void * pNext_ = nullptr )
|
|
: pNext( pNext_ ), flags( flags_ ), bindingCount( static_cast<uint32_t>( bindings_.size() ) ), pBindings( bindings_.data() )
|
|
{
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
DescriptorSetLayoutCreateInfo & operator=( DescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DescriptorSetLayoutCreateInfo & operator=( VkDescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutCreateInfo & setBindingCount( uint32_t bindingCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
bindingCount = bindingCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutCreateInfo &
|
|
setPBindings( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding * pBindings_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pBindings = pBindings_;
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
DescriptorSetLayoutCreateInfo &
|
|
setBindings( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding> const & bindings_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
bindingCount = static_cast<uint32_t>( bindings_.size() );
|
|
pBindings = bindings_.data();
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkDescriptorSetLayoutCreateInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( this );
|
|
}
|
|
|
|
operator VkDescriptorSetLayoutCreateInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDescriptorSetLayoutCreateInfo *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
const void * const &,
|
|
VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags const &,
|
|
uint32_t const &,
|
|
const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding * const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags, bindingCount, pBindings );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( DescriptorSetLayoutCreateInfo const & ) const = default;
|
|
#else
|
|
bool operator==( DescriptorSetLayoutCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( bindingCount == rhs.bindingCount ) &&
|
|
( pBindings == rhs.pBindings );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( DescriptorSetLayoutCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetLayoutCreateInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags = {};
|
|
uint32_t bindingCount = {};
|
|
const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding * pBindings = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eDescriptorSetLayoutCreateInfo>
|
|
{
|
|
using Type = DescriptorSetLayoutCreateInfo;
|
|
};
|
|
|
|
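  // Illustrative usage sketch (editorial comment, not part of the generated registry output):
  // a DescriptorSetLayoutBinding describes one binding slot, and DescriptorSetLayoutCreateInfo
  // above gathers such bindings into a layout. The binding number, descriptor type, stage flags,
  // and device handle are hypothetical.
  //
  //   DescriptorSetLayoutBinding uboBinding( 0, DescriptorType::eUniformBuffer, 1, ShaderStageFlagBits::eVertex );
  //   DescriptorSetLayoutCreateInfo layoutCreateInfo( {}, uboBinding );
  //   DescriptorSetLayout descriptorSetLayout = device.createDescriptorSetLayout( layoutCreateInfo );
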
struct DescriptorSetLayoutSupport
|
|
{
|
|
using NativeType = VkDescriptorSetLayoutSupport;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetLayoutSupport;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DescriptorSetLayoutSupport( VULKAN_HPP_NAMESPACE::Bool32 supported_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, supported( supported_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR DescriptorSetLayoutSupport( DescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DescriptorSetLayoutSupport( VkDescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DescriptorSetLayoutSupport( *reinterpret_cast<DescriptorSetLayoutSupport const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
DescriptorSetLayoutSupport & operator=( DescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DescriptorSetLayoutSupport & operator=( VkDescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
operator VkDescriptorSetLayoutSupport const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDescriptorSetLayoutSupport *>( this );
|
|
}
|
|
|
|
operator VkDescriptorSetLayoutSupport &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDescriptorSetLayoutSupport *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, supported );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( DescriptorSetLayoutSupport const & ) const = default;
|
|
#else
|
|
bool operator==( DescriptorSetLayoutSupport const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( supported == rhs.supported );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( DescriptorSetLayoutSupport const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetLayoutSupport;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 supported = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eDescriptorSetLayoutSupport>
|
|
{
|
|
using Type = DescriptorSetLayoutSupport;
|
|
};
|
|
|
|
using DescriptorSetLayoutSupportKHR = DescriptorSetLayoutSupport;
|
|
|
|
struct DescriptorSetVariableDescriptorCountAllocateInfo
|
|
{
|
|
using NativeType = VkDescriptorSetVariableDescriptorCountAllocateInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetVariableDescriptorCountAllocateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DescriptorSetVariableDescriptorCountAllocateInfo( uint32_t descriptorSetCount_ = {},
|
|
const uint32_t * pDescriptorCounts_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, descriptorSetCount( descriptorSetCount_ )
|
|
, pDescriptorCounts( pDescriptorCounts_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR
|
|
DescriptorSetVariableDescriptorCountAllocateInfo( DescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DescriptorSetVariableDescriptorCountAllocateInfo( VkDescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DescriptorSetVariableDescriptorCountAllocateInfo( *reinterpret_cast<DescriptorSetVariableDescriptorCountAllocateInfo const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
DescriptorSetVariableDescriptorCountAllocateInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & descriptorCounts_,
|
|
const void * pNext_ = nullptr )
|
|
: pNext( pNext_ ), descriptorSetCount( static_cast<uint32_t>( descriptorCounts_.size() ) ), pDescriptorCounts( descriptorCounts_.data() )
|
|
{
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
DescriptorSetVariableDescriptorCountAllocateInfo & operator=( DescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DescriptorSetVariableDescriptorCountAllocateInfo & operator=( VkDescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountAllocateInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 DescriptorSetVariableDescriptorCountAllocateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DescriptorSetVariableDescriptorCountAllocateInfo & setDescriptorSetCount( uint32_t descriptorSetCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
descriptorSetCount = descriptorSetCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DescriptorSetVariableDescriptorCountAllocateInfo & setPDescriptorCounts( const uint32_t * pDescriptorCounts_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pDescriptorCounts = pDescriptorCounts_;
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
DescriptorSetVariableDescriptorCountAllocateInfo &
|
|
setDescriptorCounts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & descriptorCounts_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
descriptorSetCount = static_cast<uint32_t>( descriptorCounts_.size() );
|
|
pDescriptorCounts = descriptorCounts_.data();
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkDescriptorSetVariableDescriptorCountAllocateInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDescriptorSetVariableDescriptorCountAllocateInfo *>( this );
|
|
}
|
|
|
|
operator VkDescriptorSetVariableDescriptorCountAllocateInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDescriptorSetVariableDescriptorCountAllocateInfo *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const uint32_t * const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, descriptorSetCount, pDescriptorCounts );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( DescriptorSetVariableDescriptorCountAllocateInfo const & ) const = default;
|
|
#else
|
|
bool operator==( DescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( descriptorSetCount == rhs.descriptorSetCount ) &&
|
|
( pDescriptorCounts == rhs.pDescriptorCounts );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( DescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetVariableDescriptorCountAllocateInfo;
|
|
const void * pNext = {};
|
|
uint32_t descriptorSetCount = {};
|
|
const uint32_t * pDescriptorCounts = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eDescriptorSetVariableDescriptorCountAllocateInfo>
|
|
{
|
|
using Type = DescriptorSetVariableDescriptorCountAllocateInfo;
|
|
};
|
|
|
|
using DescriptorSetVariableDescriptorCountAllocateInfoEXT = DescriptorSetVariableDescriptorCountAllocateInfo;
|
|
|
|
struct DescriptorSetVariableDescriptorCountLayoutSupport
|
|
{
|
|
using NativeType = VkDescriptorSetVariableDescriptorCountLayoutSupport;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetVariableDescriptorCountLayoutSupport;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DescriptorSetVariableDescriptorCountLayoutSupport( uint32_t maxVariableDescriptorCount_ = {},
|
|
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, maxVariableDescriptorCount( maxVariableDescriptorCount_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR
|
|
DescriptorSetVariableDescriptorCountLayoutSupport( DescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DescriptorSetVariableDescriptorCountLayoutSupport( VkDescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DescriptorSetVariableDescriptorCountLayoutSupport( *reinterpret_cast<DescriptorSetVariableDescriptorCountLayoutSupport const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
DescriptorSetVariableDescriptorCountLayoutSupport &
|
|
operator=( DescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DescriptorSetVariableDescriptorCountLayoutSupport & operator=( VkDescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountLayoutSupport const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
operator VkDescriptorSetVariableDescriptorCountLayoutSupport const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDescriptorSetVariableDescriptorCountLayoutSupport *>( this );
|
|
}
|
|
|
|
operator VkDescriptorSetVariableDescriptorCountLayoutSupport &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDescriptorSetVariableDescriptorCountLayoutSupport *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, maxVariableDescriptorCount );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( DescriptorSetVariableDescriptorCountLayoutSupport const & ) const = default;
|
|
#else
|
|
bool operator==( DescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxVariableDescriptorCount == rhs.maxVariableDescriptorCount );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( DescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetVariableDescriptorCountLayoutSupport;
|
|
void * pNext = {};
|
|
uint32_t maxVariableDescriptorCount = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eDescriptorSetVariableDescriptorCountLayoutSupport>
|
|
{
|
|
using Type = DescriptorSetVariableDescriptorCountLayoutSupport;
|
|
};
|
|
|
|
using DescriptorSetVariableDescriptorCountLayoutSupportEXT = DescriptorSetVariableDescriptorCountLayoutSupport;
|
|
|
|
struct DeviceBufferMemoryRequirements
|
|
{
|
|
using NativeType = VkDeviceBufferMemoryRequirements;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceBufferMemoryRequirements;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DeviceBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::BufferCreateInfo * pCreateInfo_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, pCreateInfo( pCreateInfo_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR DeviceBufferMemoryRequirements( DeviceBufferMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DeviceBufferMemoryRequirements( VkDeviceBufferMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DeviceBufferMemoryRequirements( *reinterpret_cast<DeviceBufferMemoryRequirements const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
DeviceBufferMemoryRequirements & operator=( DeviceBufferMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DeviceBufferMemoryRequirements & operator=( VkDeviceBufferMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceBufferMemoryRequirements & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceBufferMemoryRequirements & setPCreateInfo( const VULKAN_HPP_NAMESPACE::BufferCreateInfo * pCreateInfo_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pCreateInfo = pCreateInfo_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkDeviceBufferMemoryRequirements const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( this );
|
|
}
|
|
|
|
operator VkDeviceBufferMemoryRequirements &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDeviceBufferMemoryRequirements *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const VULKAN_HPP_NAMESPACE::BufferCreateInfo * const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, pCreateInfo );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( DeviceBufferMemoryRequirements const & ) const = default;
|
|
#else
|
|
bool operator==( DeviceBufferMemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pCreateInfo == rhs.pCreateInfo );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( DeviceBufferMemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceBufferMemoryRequirements;
|
|
const void * pNext = {};
|
|
const VULKAN_HPP_NAMESPACE::BufferCreateInfo * pCreateInfo = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eDeviceBufferMemoryRequirements>
|
|
{
|
|
using Type = DeviceBufferMemoryRequirements;
|
|
};
|
|
|
|
using DeviceBufferMemoryRequirementsKHR = DeviceBufferMemoryRequirements;
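  // Illustrative usage sketch (not part of the generated header): querying buffer memory
  // requirements without creating the buffer first (Vulkan 1.3 vkGetDeviceBufferMemoryRequirements).
  // Assumes a Vulkan 1.3 `device` handle; `device` and the buffer size are placeholders.
  //
  //   vk::BufferCreateInfo               bufferInfo( {}, 65536, vk::BufferUsageFlagBits::eStorageBuffer );
  //   vk::DeviceBufferMemoryRequirements query( &bufferInfo );
  //   vk::MemoryRequirements2            requirements = device.getBufferMemoryRequirements( query );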
|
|
|
|
struct DeviceQueueCreateInfo
|
|
{
|
|
using NativeType = VkDeviceQueueCreateInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceQueueCreateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DeviceQueueCreateInfo( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ = {},
|
|
uint32_t queueFamilyIndex_ = {},
|
|
uint32_t queueCount_ = {},
|
|
const float * pQueuePriorities_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, flags( flags_ )
|
|
, queueFamilyIndex( queueFamilyIndex_ )
|
|
, queueCount( queueCount_ )
|
|
, pQueuePriorities( pQueuePriorities_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR DeviceQueueCreateInfo( DeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DeviceQueueCreateInfo( VkDeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DeviceQueueCreateInfo( *reinterpret_cast<DeviceQueueCreateInfo const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
DeviceQueueCreateInfo( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_,
|
|
uint32_t queueFamilyIndex_,
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const float> const & queuePriorities_,
|
|
const void * pNext_ = nullptr )
|
|
: pNext( pNext_ )
|
|
, flags( flags_ )
|
|
, queueFamilyIndex( queueFamilyIndex_ )
|
|
, queueCount( static_cast<uint32_t>( queuePriorities_.size() ) )
|
|
, pQueuePriorities( queuePriorities_.data() )
|
|
{
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
DeviceQueueCreateInfo & operator=( DeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DeviceQueueCreateInfo & operator=( VkDeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceQueueCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceQueueCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceQueueCreateInfo & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
queueFamilyIndex = queueFamilyIndex_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceQueueCreateInfo & setQueueCount( uint32_t queueCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
queueCount = queueCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceQueueCreateInfo & setPQueuePriorities( const float * pQueuePriorities_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pQueuePriorities = pQueuePriorities_;
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
DeviceQueueCreateInfo & setQueuePriorities( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const float> const & queuePriorities_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
queueCount = static_cast<uint32_t>( queuePriorities_.size() );
|
|
pQueuePriorities = queuePriorities_.data();
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkDeviceQueueCreateInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDeviceQueueCreateInfo *>( this );
|
|
}
|
|
|
|
operator VkDeviceQueueCreateInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDeviceQueueCreateInfo *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
const void * const &,
|
|
VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
const float * const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags, queueFamilyIndex, queueCount, pQueuePriorities );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( DeviceQueueCreateInfo const & ) const = default;
|
|
#else
|
|
bool operator==( DeviceQueueCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( queueFamilyIndex == rhs.queueFamilyIndex ) &&
|
|
( queueCount == rhs.queueCount ) && ( pQueuePriorities == rhs.pQueuePriorities );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( DeviceQueueCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceQueueCreateInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags = {};
|
|
uint32_t queueFamilyIndex = {};
|
|
uint32_t queueCount = {};
|
|
const float * pQueuePriorities = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eDeviceQueueCreateInfo>
|
|
{
|
|
using Type = DeviceQueueCreateInfo;
|
|
};
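  // Illustrative usage sketch (not part of the generated header): two equivalent ways to fill
  // a DeviceQueueCreateInfo, using the pointer/count constructor and the ArrayProxy-based
  // enhanced-mode constructor. `graphicsQueueFamily` is a placeholder index.
  //
  //   float priority = 1.0f;
  //   vk::DeviceQueueCreateInfo queueInfo( {}, graphicsQueueFamily, 1, &priority );
  //
  //   std::array<float, 2> priorities = { 1.0f, 0.5f };
  //   vk::DeviceQueueCreateInfo queueInfo2( {}, graphicsQueueFamily, priorities );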
|
|
|
|
struct PhysicalDeviceFeatures
|
|
{
|
|
using NativeType = VkPhysicalDeviceFeatures;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures( VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 fullDrawIndexUint32_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 imageCubeArray_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 independentBlend_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 geometryShader_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 tessellationShader_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 sampleRateShading_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 dualSrcBlend_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 logicOp_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 multiDrawIndirect_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 drawIndirectFirstInstance_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 depthClamp_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 depthBiasClamp_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 fillModeNonSolid_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 depthBounds_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 wideLines_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 largePoints_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 alphaToOne_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 multiViewport_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 samplerAnisotropy_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 textureCompressionETC2_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_LDR_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 textureCompressionBC_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryPrecise_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 pipelineStatisticsQuery_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 vertexPipelineStoresAndAtomics_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 fragmentStoresAndAtomics_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderTessellationAndGeometryPointSize_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderImageGatherExtended_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageExtendedFormats_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageMultisample_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageReadWithoutFormat_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageWriteWithoutFormat_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayDynamicIndexing_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayDynamicIndexing_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayDynamicIndexing_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayDynamicIndexing_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderClipDistance_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderCullDistance_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderFloat64_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderInt64_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderInt16_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderResourceResidency_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderResourceMinLod_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 sparseBinding_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyBuffer_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage2D_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage3D_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 sparseResidency2Samples_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 sparseResidency4Samples_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 sparseResidency8Samples_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 sparseResidency16Samples_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyAliased_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 variableMultisampleRate_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 inheritedQueries_ = {} ) VULKAN_HPP_NOEXCEPT
|
|
: robustBufferAccess( robustBufferAccess_ )
|
|
, fullDrawIndexUint32( fullDrawIndexUint32_ )
|
|
, imageCubeArray( imageCubeArray_ )
|
|
, independentBlend( independentBlend_ )
|
|
, geometryShader( geometryShader_ )
|
|
, tessellationShader( tessellationShader_ )
|
|
, sampleRateShading( sampleRateShading_ )
|
|
, dualSrcBlend( dualSrcBlend_ )
|
|
, logicOp( logicOp_ )
|
|
, multiDrawIndirect( multiDrawIndirect_ )
|
|
, drawIndirectFirstInstance( drawIndirectFirstInstance_ )
|
|
, depthClamp( depthClamp_ )
|
|
, depthBiasClamp( depthBiasClamp_ )
|
|
, fillModeNonSolid( fillModeNonSolid_ )
|
|
, depthBounds( depthBounds_ )
|
|
, wideLines( wideLines_ )
|
|
, largePoints( largePoints_ )
|
|
, alphaToOne( alphaToOne_ )
|
|
, multiViewport( multiViewport_ )
|
|
, samplerAnisotropy( samplerAnisotropy_ )
|
|
, textureCompressionETC2( textureCompressionETC2_ )
|
|
, textureCompressionASTC_LDR( textureCompressionASTC_LDR_ )
|
|
, textureCompressionBC( textureCompressionBC_ )
|
|
, occlusionQueryPrecise( occlusionQueryPrecise_ )
|
|
, pipelineStatisticsQuery( pipelineStatisticsQuery_ )
|
|
, vertexPipelineStoresAndAtomics( vertexPipelineStoresAndAtomics_ )
|
|
, fragmentStoresAndAtomics( fragmentStoresAndAtomics_ )
|
|
, shaderTessellationAndGeometryPointSize( shaderTessellationAndGeometryPointSize_ )
|
|
, shaderImageGatherExtended( shaderImageGatherExtended_ )
|
|
, shaderStorageImageExtendedFormats( shaderStorageImageExtendedFormats_ )
|
|
, shaderStorageImageMultisample( shaderStorageImageMultisample_ )
|
|
, shaderStorageImageReadWithoutFormat( shaderStorageImageReadWithoutFormat_ )
|
|
, shaderStorageImageWriteWithoutFormat( shaderStorageImageWriteWithoutFormat_ )
|
|
, shaderUniformBufferArrayDynamicIndexing( shaderUniformBufferArrayDynamicIndexing_ )
|
|
, shaderSampledImageArrayDynamicIndexing( shaderSampledImageArrayDynamicIndexing_ )
|
|
, shaderStorageBufferArrayDynamicIndexing( shaderStorageBufferArrayDynamicIndexing_ )
|
|
, shaderStorageImageArrayDynamicIndexing( shaderStorageImageArrayDynamicIndexing_ )
|
|
, shaderClipDistance( shaderClipDistance_ )
|
|
, shaderCullDistance( shaderCullDistance_ )
|
|
, shaderFloat64( shaderFloat64_ )
|
|
, shaderInt64( shaderInt64_ )
|
|
, shaderInt16( shaderInt16_ )
|
|
, shaderResourceResidency( shaderResourceResidency_ )
|
|
, shaderResourceMinLod( shaderResourceMinLod_ )
|
|
, sparseBinding( sparseBinding_ )
|
|
, sparseResidencyBuffer( sparseResidencyBuffer_ )
|
|
, sparseResidencyImage2D( sparseResidencyImage2D_ )
|
|
, sparseResidencyImage3D( sparseResidencyImage3D_ )
|
|
, sparseResidency2Samples( sparseResidency2Samples_ )
|
|
, sparseResidency4Samples( sparseResidency4Samples_ )
|
|
, sparseResidency8Samples( sparseResidency8Samples_ )
|
|
, sparseResidency16Samples( sparseResidency16Samples_ )
|
|
, sparseResidencyAliased( sparseResidencyAliased_ )
|
|
, variableMultisampleRate( variableMultisampleRate_ )
|
|
, inheritedQueries( inheritedQueries_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures( PhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceFeatures( VkPhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceFeatures( *reinterpret_cast<PhysicalDeviceFeatures const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
PhysicalDeviceFeatures & operator=( PhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceFeatures & operator=( VkPhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setRobustBufferAccess( VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
robustBufferAccess = robustBufferAccess_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setFullDrawIndexUint32( VULKAN_HPP_NAMESPACE::Bool32 fullDrawIndexUint32_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
fullDrawIndexUint32 = fullDrawIndexUint32_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setImageCubeArray( VULKAN_HPP_NAMESPACE::Bool32 imageCubeArray_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
imageCubeArray = imageCubeArray_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setIndependentBlend( VULKAN_HPP_NAMESPACE::Bool32 independentBlend_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
independentBlend = independentBlend_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setGeometryShader( VULKAN_HPP_NAMESPACE::Bool32 geometryShader_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
geometryShader = geometryShader_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setTessellationShader( VULKAN_HPP_NAMESPACE::Bool32 tessellationShader_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
tessellationShader = tessellationShader_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSampleRateShading( VULKAN_HPP_NAMESPACE::Bool32 sampleRateShading_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
sampleRateShading = sampleRateShading_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setDualSrcBlend( VULKAN_HPP_NAMESPACE::Bool32 dualSrcBlend_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dualSrcBlend = dualSrcBlend_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setLogicOp( VULKAN_HPP_NAMESPACE::Bool32 logicOp_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
logicOp = logicOp_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setMultiDrawIndirect( VULKAN_HPP_NAMESPACE::Bool32 multiDrawIndirect_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
multiDrawIndirect = multiDrawIndirect_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setDrawIndirectFirstInstance( VULKAN_HPP_NAMESPACE::Bool32 drawIndirectFirstInstance_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
drawIndirectFirstInstance = drawIndirectFirstInstance_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setDepthClamp( VULKAN_HPP_NAMESPACE::Bool32 depthClamp_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
depthClamp = depthClamp_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setDepthBiasClamp( VULKAN_HPP_NAMESPACE::Bool32 depthBiasClamp_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
depthBiasClamp = depthBiasClamp_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setFillModeNonSolid( VULKAN_HPP_NAMESPACE::Bool32 fillModeNonSolid_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
fillModeNonSolid = fillModeNonSolid_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setDepthBounds( VULKAN_HPP_NAMESPACE::Bool32 depthBounds_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
depthBounds = depthBounds_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setWideLines( VULKAN_HPP_NAMESPACE::Bool32 wideLines_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
wideLines = wideLines_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setLargePoints( VULKAN_HPP_NAMESPACE::Bool32 largePoints_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
largePoints = largePoints_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setAlphaToOne( VULKAN_HPP_NAMESPACE::Bool32 alphaToOne_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
alphaToOne = alphaToOne_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setMultiViewport( VULKAN_HPP_NAMESPACE::Bool32 multiViewport_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
multiViewport = multiViewport_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSamplerAnisotropy( VULKAN_HPP_NAMESPACE::Bool32 samplerAnisotropy_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
samplerAnisotropy = samplerAnisotropy_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setTextureCompressionETC2( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionETC2_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
textureCompressionETC2 = textureCompressionETC2_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &
|
|
setTextureCompressionASTC_LDR( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_LDR_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
textureCompressionASTC_LDR = textureCompressionASTC_LDR_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setTextureCompressionBC( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionBC_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
textureCompressionBC = textureCompressionBC_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setOcclusionQueryPrecise( VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryPrecise_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
occlusionQueryPrecise = occlusionQueryPrecise_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setPipelineStatisticsQuery( VULKAN_HPP_NAMESPACE::Bool32 pipelineStatisticsQuery_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pipelineStatisticsQuery = pipelineStatisticsQuery_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &
|
|
setVertexPipelineStoresAndAtomics( VULKAN_HPP_NAMESPACE::Bool32 vertexPipelineStoresAndAtomics_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
vertexPipelineStoresAndAtomics = vertexPipelineStoresAndAtomics_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setFragmentStoresAndAtomics( VULKAN_HPP_NAMESPACE::Bool32 fragmentStoresAndAtomics_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
fragmentStoresAndAtomics = fragmentStoresAndAtomics_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &
|
|
setShaderTessellationAndGeometryPointSize( VULKAN_HPP_NAMESPACE::Bool32 shaderTessellationAndGeometryPointSize_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shaderTessellationAndGeometryPointSize = shaderTessellationAndGeometryPointSize_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderImageGatherExtended( VULKAN_HPP_NAMESPACE::Bool32 shaderImageGatherExtended_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shaderImageGatherExtended = shaderImageGatherExtended_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &
|
|
setShaderStorageImageExtendedFormats( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageExtendedFormats_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shaderStorageImageExtendedFormats = shaderStorageImageExtendedFormats_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &
|
|
setShaderStorageImageMultisample( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageMultisample_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shaderStorageImageMultisample = shaderStorageImageMultisample_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &
|
|
setShaderStorageImageReadWithoutFormat( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageReadWithoutFormat_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shaderStorageImageReadWithoutFormat = shaderStorageImageReadWithoutFormat_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &
|
|
setShaderStorageImageWriteWithoutFormat( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageWriteWithoutFormat_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shaderStorageImageWriteWithoutFormat = shaderStorageImageWriteWithoutFormat_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &
|
|
setShaderUniformBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shaderUniformBufferArrayDynamicIndexing = shaderUniformBufferArrayDynamicIndexing_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &
|
|
setShaderSampledImageArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shaderSampledImageArrayDynamicIndexing = shaderSampledImageArrayDynamicIndexing_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &
|
|
setShaderStorageBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shaderStorageBufferArrayDynamicIndexing = shaderStorageBufferArrayDynamicIndexing_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &
|
|
setShaderStorageImageArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shaderStorageImageArrayDynamicIndexing = shaderStorageImageArrayDynamicIndexing_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderClipDistance( VULKAN_HPP_NAMESPACE::Bool32 shaderClipDistance_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shaderClipDistance = shaderClipDistance_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderCullDistance( VULKAN_HPP_NAMESPACE::Bool32 shaderCullDistance_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shaderCullDistance = shaderCullDistance_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderFloat64( VULKAN_HPP_NAMESPACE::Bool32 shaderFloat64_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shaderFloat64 = shaderFloat64_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderInt64( VULKAN_HPP_NAMESPACE::Bool32 shaderInt64_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shaderInt64 = shaderInt64_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderInt16( VULKAN_HPP_NAMESPACE::Bool32 shaderInt16_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shaderInt16 = shaderInt16_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderResourceResidency( VULKAN_HPP_NAMESPACE::Bool32 shaderResourceResidency_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shaderResourceResidency = shaderResourceResidency_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderResourceMinLod( VULKAN_HPP_NAMESPACE::Bool32 shaderResourceMinLod_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shaderResourceMinLod = shaderResourceMinLod_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseBinding( VULKAN_HPP_NAMESPACE::Bool32 sparseBinding_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
sparseBinding = sparseBinding_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseResidencyBuffer( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyBuffer_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
sparseResidencyBuffer = sparseResidencyBuffer_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseResidencyImage2D( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage2D_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
sparseResidencyImage2D = sparseResidencyImage2D_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseResidencyImage3D( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage3D_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
sparseResidencyImage3D = sparseResidencyImage3D_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseResidency2Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency2Samples_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
sparseResidency2Samples = sparseResidency2Samples_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseResidency4Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency4Samples_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
sparseResidency4Samples = sparseResidency4Samples_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseResidency8Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency8Samples_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
sparseResidency8Samples = sparseResidency8Samples_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseResidency16Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency16Samples_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
sparseResidency16Samples = sparseResidency16Samples_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseResidencyAliased( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyAliased_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
sparseResidencyAliased = sparseResidencyAliased_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setVariableMultisampleRate( VULKAN_HPP_NAMESPACE::Bool32 variableMultisampleRate_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
variableMultisampleRate = variableMultisampleRate_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setInheritedQueries( VULKAN_HPP_NAMESPACE::Bool32 inheritedQueries_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
inheritedQueries = inheritedQueries_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkPhysicalDeviceFeatures const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceFeatures *>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceFeatures &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceFeatures *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( robustBufferAccess,
|
|
fullDrawIndexUint32,
|
|
imageCubeArray,
|
|
independentBlend,
|
|
geometryShader,
|
|
tessellationShader,
|
|
sampleRateShading,
|
|
dualSrcBlend,
|
|
logicOp,
|
|
multiDrawIndirect,
|
|
drawIndirectFirstInstance,
|
|
depthClamp,
|
|
depthBiasClamp,
|
|
fillModeNonSolid,
|
|
depthBounds,
|
|
wideLines,
|
|
largePoints,
|
|
alphaToOne,
|
|
multiViewport,
|
|
samplerAnisotropy,
|
|
textureCompressionETC2,
|
|
textureCompressionASTC_LDR,
|
|
textureCompressionBC,
|
|
occlusionQueryPrecise,
|
|
pipelineStatisticsQuery,
|
|
vertexPipelineStoresAndAtomics,
|
|
fragmentStoresAndAtomics,
|
|
shaderTessellationAndGeometryPointSize,
|
|
shaderImageGatherExtended,
|
|
shaderStorageImageExtendedFormats,
|
|
shaderStorageImageMultisample,
|
|
shaderStorageImageReadWithoutFormat,
|
|
shaderStorageImageWriteWithoutFormat,
|
|
shaderUniformBufferArrayDynamicIndexing,
|
|
shaderSampledImageArrayDynamicIndexing,
|
|
shaderStorageBufferArrayDynamicIndexing,
|
|
shaderStorageImageArrayDynamicIndexing,
|
|
shaderClipDistance,
|
|
shaderCullDistance,
|
|
shaderFloat64,
|
|
shaderInt64,
|
|
shaderInt16,
|
|
shaderResourceResidency,
|
|
shaderResourceMinLod,
|
|
sparseBinding,
|
|
sparseResidencyBuffer,
|
|
sparseResidencyImage2D,
|
|
sparseResidencyImage3D,
|
|
sparseResidency2Samples,
|
|
sparseResidency4Samples,
|
|
sparseResidency8Samples,
|
|
sparseResidency16Samples,
|
|
sparseResidencyAliased,
|
|
variableMultisampleRate,
|
|
inheritedQueries );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( PhysicalDeviceFeatures const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( robustBufferAccess == rhs.robustBufferAccess ) && ( fullDrawIndexUint32 == rhs.fullDrawIndexUint32 ) &&
|
|
( imageCubeArray == rhs.imageCubeArray ) && ( independentBlend == rhs.independentBlend ) && ( geometryShader == rhs.geometryShader ) &&
|
|
( tessellationShader == rhs.tessellationShader ) && ( sampleRateShading == rhs.sampleRateShading ) && ( dualSrcBlend == rhs.dualSrcBlend ) &&
|
|
( logicOp == rhs.logicOp ) && ( multiDrawIndirect == rhs.multiDrawIndirect ) && ( drawIndirectFirstInstance == rhs.drawIndirectFirstInstance ) &&
|
|
( depthClamp == rhs.depthClamp ) && ( depthBiasClamp == rhs.depthBiasClamp ) && ( fillModeNonSolid == rhs.fillModeNonSolid ) &&
|
|
( depthBounds == rhs.depthBounds ) && ( wideLines == rhs.wideLines ) && ( largePoints == rhs.largePoints ) && ( alphaToOne == rhs.alphaToOne ) &&
|
|
( multiViewport == rhs.multiViewport ) && ( samplerAnisotropy == rhs.samplerAnisotropy ) &&
|
|
( textureCompressionETC2 == rhs.textureCompressionETC2 ) && ( textureCompressionASTC_LDR == rhs.textureCompressionASTC_LDR ) &&
|
|
( textureCompressionBC == rhs.textureCompressionBC ) && ( occlusionQueryPrecise == rhs.occlusionQueryPrecise ) &&
|
|
( pipelineStatisticsQuery == rhs.pipelineStatisticsQuery ) && ( vertexPipelineStoresAndAtomics == rhs.vertexPipelineStoresAndAtomics ) &&
|
|
( fragmentStoresAndAtomics == rhs.fragmentStoresAndAtomics ) &&
|
|
( shaderTessellationAndGeometryPointSize == rhs.shaderTessellationAndGeometryPointSize ) &&
|
|
( shaderImageGatherExtended == rhs.shaderImageGatherExtended ) && ( shaderStorageImageExtendedFormats == rhs.shaderStorageImageExtendedFormats ) &&
|
|
( shaderStorageImageMultisample == rhs.shaderStorageImageMultisample ) &&
|
|
( shaderStorageImageReadWithoutFormat == rhs.shaderStorageImageReadWithoutFormat ) &&
|
|
( shaderStorageImageWriteWithoutFormat == rhs.shaderStorageImageWriteWithoutFormat ) &&
|
|
( shaderUniformBufferArrayDynamicIndexing == rhs.shaderUniformBufferArrayDynamicIndexing ) &&
|
|
( shaderSampledImageArrayDynamicIndexing == rhs.shaderSampledImageArrayDynamicIndexing ) &&
|
|
( shaderStorageBufferArrayDynamicIndexing == rhs.shaderStorageBufferArrayDynamicIndexing ) &&
|
|
( shaderStorageImageArrayDynamicIndexing == rhs.shaderStorageImageArrayDynamicIndexing ) && ( shaderClipDistance == rhs.shaderClipDistance ) &&
|
|
( shaderCullDistance == rhs.shaderCullDistance ) && ( shaderFloat64 == rhs.shaderFloat64 ) && ( shaderInt64 == rhs.shaderInt64 ) &&
|
|
( shaderInt16 == rhs.shaderInt16 ) && ( shaderResourceResidency == rhs.shaderResourceResidency ) &&
|
|
( shaderResourceMinLod == rhs.shaderResourceMinLod ) && ( sparseBinding == rhs.sparseBinding ) &&
|
|
( sparseResidencyBuffer == rhs.sparseResidencyBuffer ) && ( sparseResidencyImage2D == rhs.sparseResidencyImage2D ) &&
|
|
( sparseResidencyImage3D == rhs.sparseResidencyImage3D ) && ( sparseResidency2Samples == rhs.sparseResidency2Samples ) &&
|
|
( sparseResidency4Samples == rhs.sparseResidency4Samples ) && ( sparseResidency8Samples == rhs.sparseResidency8Samples ) &&
|
|
( sparseResidency16Samples == rhs.sparseResidency16Samples ) && ( sparseResidencyAliased == rhs.sparseResidencyAliased ) &&
|
|
( variableMultisampleRate == rhs.variableMultisampleRate ) && ( inheritedQueries == rhs.inheritedQueries );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 fullDrawIndexUint32 = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 imageCubeArray = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 independentBlend = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 geometryShader = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 tessellationShader = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 sampleRateShading = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 dualSrcBlend = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 logicOp = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 multiDrawIndirect = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 drawIndirectFirstInstance = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 depthClamp = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 depthBiasClamp = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 fillModeNonSolid = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 depthBounds = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 wideLines = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 largePoints = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 alphaToOne = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 multiViewport = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 samplerAnisotropy = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 textureCompressionETC2 = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_LDR = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 textureCompressionBC = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryPrecise = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 pipelineStatisticsQuery = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 vertexPipelineStoresAndAtomics = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 fragmentStoresAndAtomics = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderTessellationAndGeometryPointSize = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderImageGatherExtended = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageExtendedFormats = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageMultisample = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageReadWithoutFormat = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageWriteWithoutFormat = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayDynamicIndexing = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayDynamicIndexing = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayDynamicIndexing = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayDynamicIndexing = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderClipDistance = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderCullDistance = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderFloat64 = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderInt64 = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderInt16 = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderResourceResidency = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderResourceMinLod = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 sparseBinding = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyBuffer = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage2D = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage3D = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 sparseResidency2Samples = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 sparseResidency4Samples = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 sparseResidency8Samples = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 sparseResidency16Samples = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyAliased = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 variableMultisampleRate = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 inheritedQueries = {};
|
|
};
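  // Illustrative usage sketch (not part of the generated header): enabling a subset of the
  // features reported by the physical device, using the chained setters defined above.
  // `physicalDevice` is a placeholder handle.
  //
  //   vk::PhysicalDeviceFeatures supported = physicalDevice.getFeatures();
  //   vk::PhysicalDeviceFeatures enabled   = vk::PhysicalDeviceFeatures()
  //                                            .setSamplerAnisotropy( supported.samplerAnisotropy )
  //                                            .setFillModeNonSolid( supported.fillModeNonSolid );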
|
|
|
|
struct DeviceCreateInfo
|
|
{
|
|
using NativeType = VkDeviceCreateInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceCreateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DeviceCreateInfo( VULKAN_HPP_NAMESPACE::DeviceCreateFlags flags_ = {},
|
|
uint32_t queueCreateInfoCount_ = {},
|
|
const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo * pQueueCreateInfos_ = {},
|
|
uint32_t enabledLayerCount_ = {},
|
|
const char * const * ppEnabledLayerNames_ = {},
|
|
uint32_t enabledExtensionCount_ = {},
|
|
const char * const * ppEnabledExtensionNames_ = {},
|
|
const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pEnabledFeatures_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, flags( flags_ )
|
|
, queueCreateInfoCount( queueCreateInfoCount_ )
|
|
, pQueueCreateInfos( pQueueCreateInfos_ )
|
|
, enabledLayerCount( enabledLayerCount_ )
|
|
, ppEnabledLayerNames( ppEnabledLayerNames_ )
|
|
, enabledExtensionCount( enabledExtensionCount_ )
|
|
, ppEnabledExtensionNames( ppEnabledExtensionNames_ )
|
|
, pEnabledFeatures( pEnabledFeatures_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR DeviceCreateInfo( DeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DeviceCreateInfo( VkDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : DeviceCreateInfo( *reinterpret_cast<DeviceCreateInfo const *>( &rhs ) ) {}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
DeviceCreateInfo( VULKAN_HPP_NAMESPACE::DeviceCreateFlags flags_,
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo> const & queueCreateInfos_,
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char * const> const & pEnabledLayerNames_ = {},
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char * const> const & pEnabledExtensionNames_ = {},
|
|
const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pEnabledFeatures_ = {},
|
|
const void * pNext_ = nullptr )
|
|
: pNext( pNext_ )
|
|
, flags( flags_ )
|
|
, queueCreateInfoCount( static_cast<uint32_t>( queueCreateInfos_.size() ) )
|
|
, pQueueCreateInfos( queueCreateInfos_.data() )
|
|
, enabledLayerCount( static_cast<uint32_t>( pEnabledLayerNames_.size() ) )
|
|
, ppEnabledLayerNames( pEnabledLayerNames_.data() )
|
|
, enabledExtensionCount( static_cast<uint32_t>( pEnabledExtensionNames_.size() ) )
|
|
, ppEnabledExtensionNames( pEnabledExtensionNames_.data() )
|
|
, pEnabledFeatures( pEnabledFeatures_ )
|
|
{
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
DeviceCreateInfo & operator=( DeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DeviceCreateInfo & operator=( VkDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceCreateInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::DeviceCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & setQueueCreateInfoCount( uint32_t queueCreateInfoCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
queueCreateInfoCount = queueCreateInfoCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo &
|
|
setPQueueCreateInfos( const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo * pQueueCreateInfos_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pQueueCreateInfos = pQueueCreateInfos_;
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
DeviceCreateInfo & setQueueCreateInfos(
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo> const & queueCreateInfos_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
queueCreateInfoCount = static_cast<uint32_t>( queueCreateInfos_.size() );
|
|
pQueueCreateInfos = queueCreateInfos_.data();
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & setEnabledLayerCount( uint32_t enabledLayerCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
enabledLayerCount = enabledLayerCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & setPpEnabledLayerNames( const char * const * ppEnabledLayerNames_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
ppEnabledLayerNames = ppEnabledLayerNames_;
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
DeviceCreateInfo &
|
|
setPEnabledLayerNames( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char * const> const & pEnabledLayerNames_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
enabledLayerCount = static_cast<uint32_t>( pEnabledLayerNames_.size() );
|
|
ppEnabledLayerNames = pEnabledLayerNames_.data();
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & setEnabledExtensionCount( uint32_t enabledExtensionCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
enabledExtensionCount = enabledExtensionCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & setPpEnabledExtensionNames( const char * const * ppEnabledExtensionNames_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
ppEnabledExtensionNames = ppEnabledExtensionNames_;
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
DeviceCreateInfo &
|
|
setPEnabledExtensionNames( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char * const> const & pEnabledExtensionNames_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
enabledExtensionCount = static_cast<uint32_t>( pEnabledExtensionNames_.size() );
|
|
ppEnabledExtensionNames = pEnabledExtensionNames_.data();
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & setPEnabledFeatures( const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pEnabledFeatures_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pEnabledFeatures = pEnabledFeatures_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkDeviceCreateInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDeviceCreateInfo *>( this );
|
|
}
|
|
|
|
operator VkDeviceCreateInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDeviceCreateInfo *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
const void * const &,
|
|
VULKAN_HPP_NAMESPACE::DeviceCreateFlags const &,
|
|
uint32_t const &,
|
|
const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo * const &,
|
|
uint32_t const &,
|
|
const char * const * const &,
|
|
uint32_t const &,
|
|
const char * const * const &,
|
|
const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType,
|
|
pNext,
|
|
flags,
|
|
queueCreateInfoCount,
|
|
pQueueCreateInfos,
|
|
enabledLayerCount,
|
|
ppEnabledLayerNames,
|
|
enabledExtensionCount,
|
|
ppEnabledExtensionNames,
|
|
pEnabledFeatures );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
std::strong_ordering operator<=>( DeviceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
|
|
return cmp;
|
|
if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
|
|
return cmp;
|
|
if ( auto cmp = flags <=> rhs.flags; cmp != 0 )
|
|
return cmp;
|
|
if ( auto cmp = queueCreateInfoCount <=> rhs.queueCreateInfoCount; cmp != 0 )
|
|
return cmp;
|
|
if ( auto cmp = pQueueCreateInfos <=> rhs.pQueueCreateInfos; cmp != 0 )
|
|
return cmp;
|
|
if ( auto cmp = enabledLayerCount <=> rhs.enabledLayerCount; cmp != 0 )
|
|
return cmp;
|
|
for ( size_t i = 0; i < enabledLayerCount; ++i )
|
|
{
|
|
if ( ppEnabledLayerNames[i] != rhs.ppEnabledLayerNames[i] )
|
|
if ( auto cmp = strcmp( ppEnabledLayerNames[i], rhs.ppEnabledLayerNames[i] ); cmp != 0 )
|
|
return cmp < 0 ? std::strong_ordering::less : std::strong_ordering::greater;
|
|
}
|
|
if ( auto cmp = enabledExtensionCount <=> rhs.enabledExtensionCount; cmp != 0 )
|
|
return cmp;
|
|
for ( size_t i = 0; i < enabledExtensionCount; ++i )
|
|
{
|
|
if ( ppEnabledExtensionNames[i] != rhs.ppEnabledExtensionNames[i] )
|
|
if ( auto cmp = strcmp( ppEnabledExtensionNames[i], rhs.ppEnabledExtensionNames[i] ); cmp != 0 )
|
|
return cmp < 0 ? std::strong_ordering::less : std::strong_ordering::greater;
|
|
}
|
|
if ( auto cmp = pEnabledFeatures <=> rhs.pEnabledFeatures; cmp != 0 )
|
|
return cmp;
|
|
|
|
return std::strong_ordering::equivalent;
|
|
}
|
|
#endif
|
|
|
|
bool operator==( DeviceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( queueCreateInfoCount == rhs.queueCreateInfoCount ) &&
|
|
( pQueueCreateInfos == rhs.pQueueCreateInfos ) && ( enabledLayerCount == rhs.enabledLayerCount ) &&
|
|
std::equal( ppEnabledLayerNames,
|
|
ppEnabledLayerNames + enabledLayerCount,
|
|
rhs.ppEnabledLayerNames,
|
|
[]( char const * left, char const * right ) { return ( left == right ) || ( strcmp( left, right ) == 0 ); } ) &&
|
|
( enabledExtensionCount == rhs.enabledExtensionCount ) &&
|
|
std::equal( ppEnabledExtensionNames,
|
|
ppEnabledExtensionNames + enabledExtensionCount,
|
|
rhs.ppEnabledExtensionNames,
|
|
[]( char const * left, char const * right ) { return ( left == right ) || ( strcmp( left, right ) == 0 ); } ) &&
|
|
( pEnabledFeatures == rhs.pEnabledFeatures );
|
|
}
|
|
|
|
bool operator!=( DeviceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceCreateInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceCreateFlags flags = {};
|
|
uint32_t queueCreateInfoCount = {};
|
|
const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo * pQueueCreateInfos = {};
|
|
uint32_t enabledLayerCount = {};
|
|
const char * const * ppEnabledLayerNames = {};
|
|
uint32_t enabledExtensionCount = {};
|
|
const char * const * ppEnabledExtensionNames = {};
|
|
const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pEnabledFeatures = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eDeviceCreateInfo>
|
|
{
|
|
using Type = DeviceCreateInfo;
|
|
};
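  // Illustrative usage sketch (not part of the generated header): creating a logical device
  // with the ArrayProxy-based constructor, which fills the count fields from the passed ranges.
  // Assumes enhanced mode and default (exception-based) error handling; `physicalDevice`,
  // `queueInfo`, and `enabledFeatures` are placeholders.
  //
  //   std::vector<const char *> extensions = { VK_KHR_SWAPCHAIN_EXTENSION_NAME };
  //   vk::DeviceCreateInfo      createInfo( {}, queueInfo, {}, extensions, &enabledFeatures );
  //   vk::Device                device = physicalDevice.createDevice( createInfo );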
|
|
|
|
struct DeviceEventInfoEXT
|
|
{
|
|
using NativeType = VkDeviceEventInfoEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceEventInfoEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DeviceEventInfoEXT( VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT deviceEvent_ = VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT::eDisplayHotplug,
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, deviceEvent( deviceEvent_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR DeviceEventInfoEXT( DeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DeviceEventInfoEXT( VkDeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : DeviceEventInfoEXT( *reinterpret_cast<DeviceEventInfoEXT const *>( &rhs ) ) {}
|
|
|
|
DeviceEventInfoEXT & operator=( DeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DeviceEventInfoEXT & operator=( VkDeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceEventInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceEventInfoEXT & setDeviceEvent( VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT deviceEvent_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
deviceEvent = deviceEvent_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkDeviceEventInfoEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDeviceEventInfoEXT *>( this );
|
|
}
|
|
|
|
operator VkDeviceEventInfoEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDeviceEventInfoEXT *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, deviceEvent );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( DeviceEventInfoEXT const & ) const = default;
|
|
#else
|
|
bool operator==( DeviceEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceEvent == rhs.deviceEvent );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( DeviceEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceEventInfoEXT;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT deviceEvent = VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT::eDisplayHotplug;
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eDeviceEventInfoEXT>
|
|
{
|
|
using Type = DeviceEventInfoEXT;
|
|
};
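  // Illustrative usage sketch (not part of the generated header): requesting a fence that is
  // signaled on a display hotplug event (VK_EXT_display_control). Assumes the extension is
  // enabled and default (exception-based) result handling; `device` is a placeholder handle.
  //
  //   vk::DeviceEventInfoEXT eventInfo( vk::DeviceEventTypeEXT::eDisplayHotplug );
  //   vk::Fence              hotplugFence = device.registerEventEXT( eventInfo );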
|
|
|
|
struct DeviceGroupCommandBufferBeginInfo
|
|
{
|
|
using NativeType = VkDeviceGroupCommandBufferBeginInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupCommandBufferBeginInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DeviceGroupCommandBufferBeginInfo( uint32_t deviceMask_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, deviceMask( deviceMask_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR DeviceGroupCommandBufferBeginInfo( DeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DeviceGroupCommandBufferBeginInfo( VkDeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DeviceGroupCommandBufferBeginInfo( *reinterpret_cast<DeviceGroupCommandBufferBeginInfo const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
DeviceGroupCommandBufferBeginInfo & operator=( DeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DeviceGroupCommandBufferBeginInfo & operator=( VkDeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupCommandBufferBeginInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceGroupCommandBufferBeginInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceGroupCommandBufferBeginInfo & setDeviceMask( uint32_t deviceMask_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
deviceMask = deviceMask_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkDeviceGroupCommandBufferBeginInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDeviceGroupCommandBufferBeginInfo *>( this );
|
|
}
|
|
|
|
operator VkDeviceGroupCommandBufferBeginInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDeviceGroupCommandBufferBeginInfo *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, deviceMask );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( DeviceGroupCommandBufferBeginInfo const & ) const = default;
|
|
#else
|
|
bool operator==( DeviceGroupCommandBufferBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceMask == rhs.deviceMask );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( DeviceGroupCommandBufferBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupCommandBufferBeginInfo;
|
|
const void * pNext = {};
|
|
uint32_t deviceMask = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eDeviceGroupCommandBufferBeginInfo>
|
|
{
|
|
using Type = DeviceGroupCommandBufferBeginInfo;
|
|
};
|
|
|
|
using DeviceGroupCommandBufferBeginInfoKHR = DeviceGroupCommandBufferBeginInfo;
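  // Illustrative usage sketch (not part of the generated header): restricting command buffer
  // recording to device index 0 of a device group by chaining this struct behind
  // CommandBufferBeginInfo via a StructureChain. `commandBuffer` is a placeholder handle.
  //
  //   vk::StructureChain<vk::CommandBufferBeginInfo, vk::DeviceGroupCommandBufferBeginInfo> beginChain(
  //     vk::CommandBufferBeginInfo{}, vk::DeviceGroupCommandBufferBeginInfo( 0x1 ) );
  //   commandBuffer.begin( beginChain.get<vk::CommandBufferBeginInfo>() );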
|
|
|
|
struct DeviceGroupDeviceCreateInfo
|
|
{
|
|
using NativeType = VkDeviceGroupDeviceCreateInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupDeviceCreateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DeviceGroupDeviceCreateInfo( uint32_t physicalDeviceCount_ = {},
|
|
const VULKAN_HPP_NAMESPACE::PhysicalDevice * pPhysicalDevices_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, physicalDeviceCount( physicalDeviceCount_ )
|
|
, pPhysicalDevices( pPhysicalDevices_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR DeviceGroupDeviceCreateInfo( DeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DeviceGroupDeviceCreateInfo( VkDeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DeviceGroupDeviceCreateInfo( *reinterpret_cast<DeviceGroupDeviceCreateInfo const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
DeviceGroupDeviceCreateInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PhysicalDevice> const & physicalDevices_,
|
|
const void * pNext_ = nullptr )
|
|
: pNext( pNext_ ), physicalDeviceCount( static_cast<uint32_t>( physicalDevices_.size() ) ), pPhysicalDevices( physicalDevices_.data() )
|
|
{
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
DeviceGroupDeviceCreateInfo & operator=( DeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DeviceGroupDeviceCreateInfo & operator=( VkDeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupDeviceCreateInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceGroupDeviceCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceGroupDeviceCreateInfo & setPhysicalDeviceCount( uint32_t physicalDeviceCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
physicalDeviceCount = physicalDeviceCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceGroupDeviceCreateInfo &
|
|
setPPhysicalDevices( const VULKAN_HPP_NAMESPACE::PhysicalDevice * pPhysicalDevices_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pPhysicalDevices = pPhysicalDevices_;
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
DeviceGroupDeviceCreateInfo & setPhysicalDevices(
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PhysicalDevice> const & physicalDevices_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
physicalDeviceCount = static_cast<uint32_t>( physicalDevices_.size() );
|
|
pPhysicalDevices = physicalDevices_.data();
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkDeviceGroupDeviceCreateInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDeviceGroupDeviceCreateInfo *>( this );
|
|
}
|
|
|
|
operator VkDeviceGroupDeviceCreateInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDeviceGroupDeviceCreateInfo *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::PhysicalDevice * const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, physicalDeviceCount, pPhysicalDevices );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( DeviceGroupDeviceCreateInfo const & ) const = default;
|
|
#else
|
|
bool operator==( DeviceGroupDeviceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( physicalDeviceCount == rhs.physicalDeviceCount ) &&
|
|
( pPhysicalDevices == rhs.pPhysicalDevices );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( DeviceGroupDeviceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupDeviceCreateInfo;
|
|
const void * pNext = {};
|
|
uint32_t physicalDeviceCount = {};
|
|
const VULKAN_HPP_NAMESPACE::PhysicalDevice * pPhysicalDevices = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eDeviceGroupDeviceCreateInfo>
|
|
{
|
|
using Type = DeviceGroupDeviceCreateInfo;
|
|
};
using DeviceGroupDeviceCreateInfoKHR = DeviceGroupDeviceCreateInfo;
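
  // Illustrative usage sketch (not part of the generated API): to create one logical device from
  // several physical devices of a device group, chain a DeviceGroupDeviceCreateInfo into the pNext
  // of DeviceCreateInfo.  The ArrayProxy constructor above fills physicalDeviceCount /
  // pPhysicalDevices from a contiguous container.  The queue setup and the source of the group
  // members below are assumptions for illustration.
  //
  //   std::vector<VULKAN_HPP_NAMESPACE::PhysicalDevice> groupMembers;  // e.g. copied from
  //                                                                    // PhysicalDeviceGroupProperties::physicalDevices
  //   VULKAN_HPP_NAMESPACE::DeviceGroupDeviceCreateInfo deviceGroupInfo( groupMembers );
  //
  //   float priority = 1.0f;
  //   VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo queueInfo( {}, 0 /*queueFamilyIndex*/, 1, &priority );
  //   VULKAN_HPP_NAMESPACE::DeviceCreateInfo createInfo( {}, queueInfo );
  //   createInfo.setPNext( &deviceGroupInfo );
  //
  //   VULKAN_HPP_NAMESPACE::Device device = groupMembers[0].createDevice( createInfo );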
struct DeviceGroupPresentCapabilitiesKHR
|
|
{
|
|
using NativeType = VkDeviceGroupPresentCapabilitiesKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupPresentCapabilitiesKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentCapabilitiesKHR( std::array<uint32_t, VK_MAX_DEVICE_GROUP_SIZE> const & presentMask_ = {},
|
|
VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes_ = {},
|
|
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, presentMask( presentMask_ )
|
|
, modes( modes_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentCapabilitiesKHR( DeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DeviceGroupPresentCapabilitiesKHR( VkDeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DeviceGroupPresentCapabilitiesKHR( *reinterpret_cast<DeviceGroupPresentCapabilitiesKHR const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
DeviceGroupPresentCapabilitiesKHR & operator=( DeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DeviceGroupPresentCapabilitiesKHR & operator=( VkDeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
operator VkDeviceGroupPresentCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDeviceGroupPresentCapabilitiesKHR *>( this );
|
|
}
|
|
|
|
operator VkDeviceGroupPresentCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDeviceGroupPresentCapabilitiesKHR *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
void * const &,
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, VK_MAX_DEVICE_GROUP_SIZE> const &,
|
|
VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, presentMask, modes );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( DeviceGroupPresentCapabilitiesKHR const & ) const = default;
|
|
#else
|
|
bool operator==( DeviceGroupPresentCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentMask == rhs.presentMask ) && ( modes == rhs.modes );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( DeviceGroupPresentCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupPresentCapabilitiesKHR;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, VK_MAX_DEVICE_GROUP_SIZE> presentMask = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eDeviceGroupPresentCapabilitiesKHR>
|
|
{
|
|
using Type = DeviceGroupPresentCapabilitiesKHR;
|
|
};
|
|
|
|
struct DeviceGroupPresentInfoKHR
|
|
{
|
|
using NativeType = VkDeviceGroupPresentInfoKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupPresentInfoKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DeviceGroupPresentInfoKHR(
|
|
uint32_t swapchainCount_ = {},
|
|
const uint32_t * pDeviceMasks_ = {},
|
|
VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode_ = VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR::eLocal,
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, swapchainCount( swapchainCount_ )
|
|
, pDeviceMasks( pDeviceMasks_ )
|
|
, mode( mode_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR DeviceGroupPresentInfoKHR( DeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DeviceGroupPresentInfoKHR( VkDeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DeviceGroupPresentInfoKHR( *reinterpret_cast<DeviceGroupPresentInfoKHR const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
DeviceGroupPresentInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & deviceMasks_,
|
|
VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode_ = VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR::eLocal,
|
|
const void * pNext_ = nullptr )
|
|
: pNext( pNext_ ), swapchainCount( static_cast<uint32_t>( deviceMasks_.size() ) ), pDeviceMasks( deviceMasks_.data() ), mode( mode_ )
|
|
{
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
DeviceGroupPresentInfoKHR & operator=( DeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DeviceGroupPresentInfoKHR & operator=( VkDeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupPresentInfoKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentInfoKHR & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
swapchainCount = swapchainCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentInfoKHR & setPDeviceMasks( const uint32_t * pDeviceMasks_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pDeviceMasks = pDeviceMasks_;
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
DeviceGroupPresentInfoKHR & setDeviceMasks( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & deviceMasks_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
swapchainCount = static_cast<uint32_t>( deviceMasks_.size() );
|
|
pDeviceMasks = deviceMasks_.data();
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentInfoKHR & setMode( VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
mode = mode_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkDeviceGroupPresentInfoKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDeviceGroupPresentInfoKHR *>( this );
|
|
}
|
|
|
|
operator VkDeviceGroupPresentInfoKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDeviceGroupPresentInfoKHR *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
const void * const &,
|
|
uint32_t const &,
|
|
const uint32_t * const &,
|
|
VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, swapchainCount, pDeviceMasks, mode );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( DeviceGroupPresentInfoKHR const & ) const = default;
|
|
#else
|
|
bool operator==( DeviceGroupPresentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchainCount == rhs.swapchainCount ) && ( pDeviceMasks == rhs.pDeviceMasks ) &&
|
|
( mode == rhs.mode );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( DeviceGroupPresentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupPresentInfoKHR;
|
|
const void * pNext = {};
|
|
uint32_t swapchainCount = {};
|
|
const uint32_t * pDeviceMasks = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode = VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR::eLocal;
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eDeviceGroupPresentInfoKHR>
|
|
{
|
|
using Type = DeviceGroupPresentInfoKHR;
};
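
  // Illustrative usage sketch (not part of the generated API): when presenting from a swapchain
  // created for a device group, a DeviceGroupPresentInfoKHR can be chained into PresentInfoKHR to
  // select which device(s) of the group present each swapchain image.  The handles, image index and
  // mask value below are assumptions for illustration.
  //
  //   uint32_t deviceMask = 0x1;  // image was rendered on device 0
  //   VULKAN_HPP_NAMESPACE::DeviceGroupPresentInfoKHR deviceGroupPresent(
  //     1, &deviceMask, VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR::eLocal );
  //
  //   VULKAN_HPP_NAMESPACE::PresentInfoKHR presentInfo( waitSemaphore, swapchain, imageIndex );
  //   presentInfo.setPNext( &deviceGroupPresent );
  //   presentQueue.presentKHR( presentInfo );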
struct DeviceGroupRenderPassBeginInfo
|
|
{
|
|
using NativeType = VkDeviceGroupRenderPassBeginInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupRenderPassBeginInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DeviceGroupRenderPassBeginInfo( uint32_t deviceMask_ = {},
|
|
uint32_t deviceRenderAreaCount_ = {},
|
|
const VULKAN_HPP_NAMESPACE::Rect2D * pDeviceRenderAreas_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, deviceMask( deviceMask_ )
|
|
, deviceRenderAreaCount( deviceRenderAreaCount_ )
|
|
, pDeviceRenderAreas( pDeviceRenderAreas_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR DeviceGroupRenderPassBeginInfo( DeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DeviceGroupRenderPassBeginInfo( VkDeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DeviceGroupRenderPassBeginInfo( *reinterpret_cast<DeviceGroupRenderPassBeginInfo const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
DeviceGroupRenderPassBeginInfo( uint32_t deviceMask_,
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & deviceRenderAreas_,
|
|
const void * pNext_ = nullptr )
|
|
: pNext( pNext_ )
|
|
, deviceMask( deviceMask_ )
|
|
, deviceRenderAreaCount( static_cast<uint32_t>( deviceRenderAreas_.size() ) )
|
|
, pDeviceRenderAreas( deviceRenderAreas_.data() )
|
|
{
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
DeviceGroupRenderPassBeginInfo & operator=( DeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DeviceGroupRenderPassBeginInfo & operator=( VkDeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupRenderPassBeginInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceGroupRenderPassBeginInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceGroupRenderPassBeginInfo & setDeviceMask( uint32_t deviceMask_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
deviceMask = deviceMask_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceGroupRenderPassBeginInfo & setDeviceRenderAreaCount( uint32_t deviceRenderAreaCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
deviceRenderAreaCount = deviceRenderAreaCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceGroupRenderPassBeginInfo &
|
|
setPDeviceRenderAreas( const VULKAN_HPP_NAMESPACE::Rect2D * pDeviceRenderAreas_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pDeviceRenderAreas = pDeviceRenderAreas_;
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
DeviceGroupRenderPassBeginInfo &
|
|
setDeviceRenderAreas( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & deviceRenderAreas_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
deviceRenderAreaCount = static_cast<uint32_t>( deviceRenderAreas_.size() );
|
|
pDeviceRenderAreas = deviceRenderAreas_.data();
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkDeviceGroupRenderPassBeginInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDeviceGroupRenderPassBeginInfo *>( this );
|
|
}
|
|
|
|
operator VkDeviceGroupRenderPassBeginInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDeviceGroupRenderPassBeginInfo *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::
|
|
tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::Rect2D * const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, deviceMask, deviceRenderAreaCount, pDeviceRenderAreas );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( DeviceGroupRenderPassBeginInfo const & ) const = default;
|
|
#else
|
|
bool operator==( DeviceGroupRenderPassBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceMask == rhs.deviceMask ) && ( deviceRenderAreaCount == rhs.deviceRenderAreaCount ) &&
|
|
( pDeviceRenderAreas == rhs.pDeviceRenderAreas );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( DeviceGroupRenderPassBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupRenderPassBeginInfo;
|
|
const void * pNext = {};
|
|
uint32_t deviceMask = {};
|
|
uint32_t deviceRenderAreaCount = {};
|
|
const VULKAN_HPP_NAMESPACE::Rect2D * pDeviceRenderAreas = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eDeviceGroupRenderPassBeginInfo>
|
|
{
|
|
using Type = DeviceGroupRenderPassBeginInfo;
|
|
};
using DeviceGroupRenderPassBeginInfoKHR = DeviceGroupRenderPassBeginInfo;
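
  // Illustrative usage sketch (not part of the generated API): chaining a
  // DeviceGroupRenderPassBeginInfo into RenderPassBeginInfo lets each device of the group render a
  // different region of the framebuffer.  The render pass, framebuffer, clear values and extents
  // below are assumptions for illustration.
  //
  //   std::array<VULKAN_HPP_NAMESPACE::Rect2D, 2> renderAreas = {
  //     VULKAN_HPP_NAMESPACE::Rect2D( { 0, 0 }, { 960, 1080 } ),      // device 0: left half
  //     VULKAN_HPP_NAMESPACE::Rect2D( { 960, 0 }, { 960, 1080 } ) };  // device 1: right half
  //   VULKAN_HPP_NAMESPACE::DeviceGroupRenderPassBeginInfo deviceGroupRpBegin( 0x3, renderAreas );
  //
  //   VULKAN_HPP_NAMESPACE::RenderPassBeginInfo rpBegin( renderPass, framebuffer,
  //                                                      { { 0, 0 }, { 1920, 1080 } }, clearValues );
  //   rpBegin.setPNext( &deviceGroupRpBegin );
  //   commandBuffer.beginRenderPass( rpBegin, VULKAN_HPP_NAMESPACE::SubpassContents::eInline );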
struct DeviceGroupSubmitInfo
|
|
{
|
|
using NativeType = VkDeviceGroupSubmitInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupSubmitInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DeviceGroupSubmitInfo( uint32_t waitSemaphoreCount_ = {},
|
|
const uint32_t * pWaitSemaphoreDeviceIndices_ = {},
|
|
uint32_t commandBufferCount_ = {},
|
|
const uint32_t * pCommandBufferDeviceMasks_ = {},
|
|
uint32_t signalSemaphoreCount_ = {},
|
|
const uint32_t * pSignalSemaphoreDeviceIndices_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, waitSemaphoreCount( waitSemaphoreCount_ )
|
|
, pWaitSemaphoreDeviceIndices( pWaitSemaphoreDeviceIndices_ )
|
|
, commandBufferCount( commandBufferCount_ )
|
|
, pCommandBufferDeviceMasks( pCommandBufferDeviceMasks_ )
|
|
, signalSemaphoreCount( signalSemaphoreCount_ )
|
|
, pSignalSemaphoreDeviceIndices( pSignalSemaphoreDeviceIndices_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR DeviceGroupSubmitInfo( DeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DeviceGroupSubmitInfo( VkDeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DeviceGroupSubmitInfo( *reinterpret_cast<DeviceGroupSubmitInfo const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
DeviceGroupSubmitInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & waitSemaphoreDeviceIndices_,
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & commandBufferDeviceMasks_ = {},
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & signalSemaphoreDeviceIndices_ = {},
|
|
const void * pNext_ = nullptr )
|
|
: pNext( pNext_ )
|
|
, waitSemaphoreCount( static_cast<uint32_t>( waitSemaphoreDeviceIndices_.size() ) )
|
|
, pWaitSemaphoreDeviceIndices( waitSemaphoreDeviceIndices_.data() )
|
|
, commandBufferCount( static_cast<uint32_t>( commandBufferDeviceMasks_.size() ) )
|
|
, pCommandBufferDeviceMasks( commandBufferDeviceMasks_.data() )
|
|
, signalSemaphoreCount( static_cast<uint32_t>( signalSemaphoreDeviceIndices_.size() ) )
|
|
, pSignalSemaphoreDeviceIndices( signalSemaphoreDeviceIndices_.data() )
|
|
{
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
DeviceGroupSubmitInfo & operator=( DeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DeviceGroupSubmitInfo & operator=( VkDeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupSubmitInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo & setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
waitSemaphoreCount = waitSemaphoreCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo & setPWaitSemaphoreDeviceIndices( const uint32_t * pWaitSemaphoreDeviceIndices_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pWaitSemaphoreDeviceIndices = pWaitSemaphoreDeviceIndices_;
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
DeviceGroupSubmitInfo &
|
|
setWaitSemaphoreDeviceIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & waitSemaphoreDeviceIndices_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
waitSemaphoreCount = static_cast<uint32_t>( waitSemaphoreDeviceIndices_.size() );
|
|
pWaitSemaphoreDeviceIndices = waitSemaphoreDeviceIndices_.data();
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo & setCommandBufferCount( uint32_t commandBufferCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
commandBufferCount = commandBufferCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo & setPCommandBufferDeviceMasks( const uint32_t * pCommandBufferDeviceMasks_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pCommandBufferDeviceMasks = pCommandBufferDeviceMasks_;
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
DeviceGroupSubmitInfo &
|
|
setCommandBufferDeviceMasks( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & commandBufferDeviceMasks_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
commandBufferCount = static_cast<uint32_t>( commandBufferDeviceMasks_.size() );
|
|
pCommandBufferDeviceMasks = commandBufferDeviceMasks_.data();
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo & setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
signalSemaphoreCount = signalSemaphoreCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo & setPSignalSemaphoreDeviceIndices( const uint32_t * pSignalSemaphoreDeviceIndices_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pSignalSemaphoreDeviceIndices = pSignalSemaphoreDeviceIndices_;
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
DeviceGroupSubmitInfo &
|
|
setSignalSemaphoreDeviceIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & signalSemaphoreDeviceIndices_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
signalSemaphoreCount = static_cast<uint32_t>( signalSemaphoreDeviceIndices_.size() );
|
|
pSignalSemaphoreDeviceIndices = signalSemaphoreDeviceIndices_.data();
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkDeviceGroupSubmitInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDeviceGroupSubmitInfo *>( this );
|
|
}
|
|
|
|
operator VkDeviceGroupSubmitInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDeviceGroupSubmitInfo *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
const void * const &,
|
|
uint32_t const &,
|
|
const uint32_t * const &,
|
|
uint32_t const &,
|
|
const uint32_t * const &,
|
|
uint32_t const &,
|
|
const uint32_t * const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType,
|
|
pNext,
|
|
waitSemaphoreCount,
|
|
pWaitSemaphoreDeviceIndices,
|
|
commandBufferCount,
|
|
pCommandBufferDeviceMasks,
|
|
signalSemaphoreCount,
|
|
pSignalSemaphoreDeviceIndices );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( DeviceGroupSubmitInfo const & ) const = default;
|
|
#else
|
|
bool operator==( DeviceGroupSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( waitSemaphoreCount == rhs.waitSemaphoreCount ) &&
|
|
( pWaitSemaphoreDeviceIndices == rhs.pWaitSemaphoreDeviceIndices ) && ( commandBufferCount == rhs.commandBufferCount ) &&
|
|
( pCommandBufferDeviceMasks == rhs.pCommandBufferDeviceMasks ) && ( signalSemaphoreCount == rhs.signalSemaphoreCount ) &&
|
|
( pSignalSemaphoreDeviceIndices == rhs.pSignalSemaphoreDeviceIndices );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( DeviceGroupSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupSubmitInfo;
|
|
const void * pNext = {};
|
|
uint32_t waitSemaphoreCount = {};
|
|
const uint32_t * pWaitSemaphoreDeviceIndices = {};
|
|
uint32_t commandBufferCount = {};
|
|
const uint32_t * pCommandBufferDeviceMasks = {};
|
|
uint32_t signalSemaphoreCount = {};
|
|
const uint32_t * pSignalSemaphoreDeviceIndices = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eDeviceGroupSubmitInfo>
|
|
{
|
|
using Type = DeviceGroupSubmitInfo;
|
|
};
using DeviceGroupSubmitInfoKHR = DeviceGroupSubmitInfo;
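
  // Illustrative usage sketch (not part of the generated API): a DeviceGroupSubmitInfo chained into
  // SubmitInfo assigns, per wait semaphore, per command buffer and per signal semaphore, which
  // device(s) of the group the corresponding entry applies to.  The handles, stage mask and index
  // values below are assumptions for illustration.
  //
  //   uint32_t waitDeviceIndex   = 0;    // wait semaphore is waited on device 0
  //   uint32_t commandDeviceMask = 0x3;  // command buffer executes on devices 0 and 1
  //   uint32_t signalDeviceIndex = 0;    // signal semaphore is signalled on device 0
  //   VULKAN_HPP_NAMESPACE::DeviceGroupSubmitInfo deviceGroupSubmit(
  //     waitDeviceIndex, commandDeviceMask, signalDeviceIndex );
  //
  //   VULKAN_HPP_NAMESPACE::SubmitInfo submitInfo( waitSemaphore, waitStageMask, commandBuffer, signalSemaphore );
  //   submitInfo.setPNext( &deviceGroupSubmit );
  //   queue.submit( submitInfo, fence );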
struct DeviceGroupSwapchainCreateInfoKHR
|
|
{
|
|
using NativeType = VkDeviceGroupSwapchainCreateInfoKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupSwapchainCreateInfoKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DeviceGroupSwapchainCreateInfoKHR( VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, modes( modes_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR DeviceGroupSwapchainCreateInfoKHR( DeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DeviceGroupSwapchainCreateInfoKHR( VkDeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DeviceGroupSwapchainCreateInfoKHR( *reinterpret_cast<DeviceGroupSwapchainCreateInfoKHR const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
DeviceGroupSwapchainCreateInfoKHR & operator=( DeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DeviceGroupSwapchainCreateInfoKHR & operator=( VkDeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupSwapchainCreateInfoKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceGroupSwapchainCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceGroupSwapchainCreateInfoKHR & setModes( VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
modes = modes_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkDeviceGroupSwapchainCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDeviceGroupSwapchainCreateInfoKHR *>( this );
|
|
}
|
|
|
|
operator VkDeviceGroupSwapchainCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDeviceGroupSwapchainCreateInfoKHR *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, modes );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( DeviceGroupSwapchainCreateInfoKHR const & ) const = default;
|
|
#else
|
|
bool operator==( DeviceGroupSwapchainCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( modes == rhs.modes );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( DeviceGroupSwapchainCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupSwapchainCreateInfoKHR;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eDeviceGroupSwapchainCreateInfoKHR>
|
|
{
|
|
using Type = DeviceGroupSwapchainCreateInfoKHR;
};
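
  // Illustrative usage sketch (not part of the generated API): to allow device-group present modes
  // on a swapchain, chain a DeviceGroupSwapchainCreateInfoKHR into SwapchainCreateInfoKHR.  The
  // usual surface / format / extent setup of the swapchain is assumed and omitted here.
  //
  //   VULKAN_HPP_NAMESPACE::DeviceGroupSwapchainCreateInfoKHR deviceGroupSwapchainInfo(
  //     VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR::eLocal |
  //     VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR::eLocalMultiDevice );
  //
  //   VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR swapchainInfo{};  // fill surface, format, extent, ...
  //   swapchainInfo.setPNext( &deviceGroupSwapchainInfo );
  //   VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain = device.createSwapchainKHR( swapchainInfo );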
struct ImageCreateInfo
|
|
{
|
|
using NativeType = VkImageCreateInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageCreateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ImageCreateInfo( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ = {},
|
|
VULKAN_HPP_NAMESPACE::ImageType imageType_ = VULKAN_HPP_NAMESPACE::ImageType::e1D,
|
|
VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
|
|
VULKAN_HPP_NAMESPACE::Extent3D extent_ = {},
|
|
uint32_t mipLevels_ = {},
|
|
uint32_t arrayLayers_ = {},
|
|
VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1,
|
|
VULKAN_HPP_NAMESPACE::ImageTiling tiling_ = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal,
|
|
VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {},
|
|
VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive,
|
|
uint32_t queueFamilyIndexCount_ = {},
|
|
const uint32_t * pQueueFamilyIndices_ = {},
|
|
VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, flags( flags_ )
|
|
, imageType( imageType_ )
|
|
, format( format_ )
|
|
, extent( extent_ )
|
|
, mipLevels( mipLevels_ )
|
|
, arrayLayers( arrayLayers_ )
|
|
, samples( samples_ )
|
|
, tiling( tiling_ )
|
|
, usage( usage_ )
|
|
, sharingMode( sharingMode_ )
|
|
, queueFamilyIndexCount( queueFamilyIndexCount_ )
|
|
, pQueueFamilyIndices( pQueueFamilyIndices_ )
|
|
, initialLayout( initialLayout_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR ImageCreateInfo( ImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImageCreateInfo( VkImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : ImageCreateInfo( *reinterpret_cast<ImageCreateInfo const *>( &rhs ) ) {}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
ImageCreateInfo( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_,
|
|
VULKAN_HPP_NAMESPACE::ImageType imageType_,
|
|
VULKAN_HPP_NAMESPACE::Format format_,
|
|
VULKAN_HPP_NAMESPACE::Extent3D extent_,
|
|
uint32_t mipLevels_,
|
|
uint32_t arrayLayers_,
|
|
VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_,
|
|
VULKAN_HPP_NAMESPACE::ImageTiling tiling_,
|
|
VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_,
|
|
VULKAN_HPP_NAMESPACE::SharingMode sharingMode_,
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & queueFamilyIndices_,
|
|
VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
|
|
const void * pNext_ = nullptr )
|
|
: pNext( pNext_ )
|
|
, flags( flags_ )
|
|
, imageType( imageType_ )
|
|
, format( format_ )
|
|
, extent( extent_ )
|
|
, mipLevels( mipLevels_ )
|
|
, arrayLayers( arrayLayers_ )
|
|
, samples( samples_ )
|
|
, tiling( tiling_ )
|
|
, usage( usage_ )
|
|
, sharingMode( sharingMode_ )
|
|
, queueFamilyIndexCount( static_cast<uint32_t>( queueFamilyIndices_.size() ) )
|
|
, pQueueFamilyIndices( queueFamilyIndices_.data() )
|
|
, initialLayout( initialLayout_ )
|
|
{
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
ImageCreateInfo & operator=( ImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ImageCreateInfo & operator=( VkImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageCreateInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setImageType( VULKAN_HPP_NAMESPACE::ImageType imageType_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
imageType = imageType_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
format = format_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
extent = extent_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setMipLevels( uint32_t mipLevels_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
mipLevels = mipLevels_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setArrayLayers( uint32_t arrayLayers_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
arrayLayers = arrayLayers_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
samples = samples_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setTiling( VULKAN_HPP_NAMESPACE::ImageTiling tiling_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
tiling = tiling_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
usage = usage_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setSharingMode( VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
sharingMode = sharingMode_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
queueFamilyIndexCount = queueFamilyIndexCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setPQueueFamilyIndices( const uint32_t * pQueueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pQueueFamilyIndices = pQueueFamilyIndices_;
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
ImageCreateInfo & setQueueFamilyIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & queueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
queueFamilyIndexCount = static_cast<uint32_t>( queueFamilyIndices_.size() );
|
|
pQueueFamilyIndices = queueFamilyIndices_.data();
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setInitialLayout( VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
initialLayout = initialLayout_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkImageCreateInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkImageCreateInfo *>( this );
|
|
}
|
|
|
|
operator VkImageCreateInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkImageCreateInfo *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
const void * const &,
|
|
VULKAN_HPP_NAMESPACE::ImageCreateFlags const &,
|
|
VULKAN_HPP_NAMESPACE::ImageType const &,
|
|
VULKAN_HPP_NAMESPACE::Format const &,
|
|
VULKAN_HPP_NAMESPACE::Extent3D const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
VULKAN_HPP_NAMESPACE::SampleCountFlagBits const &,
|
|
VULKAN_HPP_NAMESPACE::ImageTiling const &,
|
|
VULKAN_HPP_NAMESPACE::ImageUsageFlags const &,
|
|
VULKAN_HPP_NAMESPACE::SharingMode const &,
|
|
uint32_t const &,
|
|
const uint32_t * const &,
|
|
VULKAN_HPP_NAMESPACE::ImageLayout const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType,
|
|
pNext,
|
|
flags,
|
|
imageType,
|
|
format,
|
|
extent,
|
|
mipLevels,
|
|
arrayLayers,
|
|
samples,
|
|
tiling,
|
|
usage,
|
|
sharingMode,
|
|
queueFamilyIndexCount,
|
|
pQueueFamilyIndices,
|
|
initialLayout );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( ImageCreateInfo const & ) const = default;
|
|
#else
|
|
bool operator==( ImageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( imageType == rhs.imageType ) && ( format == rhs.format ) &&
|
|
( extent == rhs.extent ) && ( mipLevels == rhs.mipLevels ) && ( arrayLayers == rhs.arrayLayers ) && ( samples == rhs.samples ) &&
|
|
( tiling == rhs.tiling ) && ( usage == rhs.usage ) && ( sharingMode == rhs.sharingMode ) &&
|
|
( queueFamilyIndexCount == rhs.queueFamilyIndexCount ) && ( pQueueFamilyIndices == rhs.pQueueFamilyIndices ) &&
|
|
( initialLayout == rhs.initialLayout );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( ImageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageCreateInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::ImageCreateFlags flags = {};
|
|
VULKAN_HPP_NAMESPACE::ImageType imageType = VULKAN_HPP_NAMESPACE::ImageType::e1D;
|
|
VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
|
|
VULKAN_HPP_NAMESPACE::Extent3D extent = {};
|
|
uint32_t mipLevels = {};
|
|
uint32_t arrayLayers = {};
|
|
VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
|
|
VULKAN_HPP_NAMESPACE::ImageTiling tiling = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal;
|
|
VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {};
|
|
VULKAN_HPP_NAMESPACE::SharingMode sharingMode = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive;
|
|
uint32_t queueFamilyIndexCount = {};
|
|
const uint32_t * pQueueFamilyIndices = {};
|
|
VULKAN_HPP_NAMESPACE::ImageLayout initialLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eImageCreateInfo>
|
|
{
|
|
using Type = ImageCreateInfo;
};
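
  // Illustrative usage sketch (not part of the generated API): a typical 2D sampled texture filled
  // in through the constructor above.  The device handle and the dimensions are assumptions for
  // illustration.
  //
  //   VULKAN_HPP_NAMESPACE::ImageCreateInfo imageInfo(
  //     {},                                                 // flags
  //     VULKAN_HPP_NAMESPACE::ImageType::e2D,
  //     VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Unorm,
  //     VULKAN_HPP_NAMESPACE::Extent3D( 1024, 1024, 1 ),
  //     11,                                                 // mipLevels: full chain for 1024x1024
  //     1,                                                  // arrayLayers
  //     VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1,
  //     VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal,
  //     VULKAN_HPP_NAMESPACE::ImageUsageFlagBits::eSampled | VULKAN_HPP_NAMESPACE::ImageUsageFlagBits::eTransferDst,
  //     VULKAN_HPP_NAMESPACE::SharingMode::eExclusive );
  //
  //   VULKAN_HPP_NAMESPACE::Image image = device.createImage( imageInfo );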
struct DeviceImageMemoryRequirements
|
|
{
|
|
using NativeType = VkDeviceImageMemoryRequirements;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceImageMemoryRequirements;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR
|
|
DeviceImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::ImageCreateInfo * pCreateInfo_ = {},
|
|
VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor,
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, pCreateInfo( pCreateInfo_ )
|
|
, planeAspect( planeAspect_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR DeviceImageMemoryRequirements( DeviceImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DeviceImageMemoryRequirements( VkDeviceImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DeviceImageMemoryRequirements( *reinterpret_cast<DeviceImageMemoryRequirements const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
DeviceImageMemoryRequirements & operator=( DeviceImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DeviceImageMemoryRequirements & operator=( VkDeviceImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceImageMemoryRequirements & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceImageMemoryRequirements & setPCreateInfo( const VULKAN_HPP_NAMESPACE::ImageCreateInfo * pCreateInfo_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pCreateInfo = pCreateInfo_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceImageMemoryRequirements & setPlaneAspect( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
planeAspect = planeAspect_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkDeviceImageMemoryRequirements const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDeviceImageMemoryRequirements *>( this );
|
|
}
|
|
|
|
operator VkDeviceImageMemoryRequirements &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDeviceImageMemoryRequirements *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
const void * const &,
|
|
const VULKAN_HPP_NAMESPACE::ImageCreateInfo * const &,
|
|
VULKAN_HPP_NAMESPACE::ImageAspectFlagBits const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, pCreateInfo, planeAspect );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( DeviceImageMemoryRequirements const & ) const = default;
|
|
#else
|
|
bool operator==( DeviceImageMemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pCreateInfo == rhs.pCreateInfo ) && ( planeAspect == rhs.planeAspect );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( DeviceImageMemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceImageMemoryRequirements;
|
|
const void * pNext = {};
|
|
const VULKAN_HPP_NAMESPACE::ImageCreateInfo * pCreateInfo = {};
|
|
VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor;
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eDeviceImageMemoryRequirements>
|
|
{
|
|
using Type = DeviceImageMemoryRequirements;
|
|
};
using DeviceImageMemoryRequirementsKHR = DeviceImageMemoryRequirements;
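
  // Illustrative usage sketch (not part of the generated API): with Vulkan 1.3 / VK_KHR_maintenance4,
  // memory requirements can be queried from an ImageCreateInfo without creating the image first.
  // The query entry point shown (a Device::getImageMemoryRequirements overload taking this struct)
  // and the imageInfo variable are assumptions for illustration.
  //
  //   VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements requirementsInfo( &imageInfo );
  //   VULKAN_HPP_NAMESPACE::MemoryRequirements2 requirements =
  //     device.getImageMemoryRequirements( requirementsInfo );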
struct DeviceMemoryOpaqueCaptureAddressInfo
|
|
{
|
|
using NativeType = VkDeviceMemoryOpaqueCaptureAddressInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceMemoryOpaqueCaptureAddressInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DeviceMemoryOpaqueCaptureAddressInfo( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, memory( memory_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR DeviceMemoryOpaqueCaptureAddressInfo( DeviceMemoryOpaqueCaptureAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DeviceMemoryOpaqueCaptureAddressInfo( VkDeviceMemoryOpaqueCaptureAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DeviceMemoryOpaqueCaptureAddressInfo( *reinterpret_cast<DeviceMemoryOpaqueCaptureAddressInfo const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
DeviceMemoryOpaqueCaptureAddressInfo & operator=( DeviceMemoryOpaqueCaptureAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DeviceMemoryOpaqueCaptureAddressInfo & operator=( VkDeviceMemoryOpaqueCaptureAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceMemoryOpaqueCaptureAddressInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceMemoryOpaqueCaptureAddressInfo & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
memory = memory_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkDeviceMemoryOpaqueCaptureAddressInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( this );
|
|
}
|
|
|
|
operator VkDeviceMemoryOpaqueCaptureAddressInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDeviceMemoryOpaqueCaptureAddressInfo *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceMemory const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, memory );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( DeviceMemoryOpaqueCaptureAddressInfo const & ) const = default;
|
|
#else
|
|
bool operator==( DeviceMemoryOpaqueCaptureAddressInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( DeviceMemoryOpaqueCaptureAddressInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceMemoryOpaqueCaptureAddressInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eDeviceMemoryOpaqueCaptureAddressInfo>
|
|
{
|
|
using Type = DeviceMemoryOpaqueCaptureAddressInfo;
|
|
};
using DeviceMemoryOpaqueCaptureAddressInfoKHR = DeviceMemoryOpaqueCaptureAddressInfo;
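
  // Illustrative usage sketch (not part of the generated API): the opaque capture address of a
  // device memory allocation (used for capture/replay of buffer device addresses) is queried by
  // filling this struct.  The query call shown and the memory handle are assumptions for
  // illustration.
  //
  //   VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo addressInfo( memory );
  //   uint64_t captureAddress = device.getMemoryOpaqueCaptureAddress( addressInfo );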
struct PipelineCacheCreateInfo
|
|
{
|
|
using NativeType = VkPipelineCacheCreateInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCacheCreateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PipelineCacheCreateInfo( VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags flags_ = {},
|
|
size_t initialDataSize_ = {},
|
|
const void * pInitialData_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, flags( flags_ )
|
|
, initialDataSize( initialDataSize_ )
|
|
, pInitialData( pInitialData_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR PipelineCacheCreateInfo( PipelineCacheCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PipelineCacheCreateInfo( VkPipelineCacheCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PipelineCacheCreateInfo( *reinterpret_cast<PipelineCacheCreateInfo const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
template <typename T>
|
|
PipelineCacheCreateInfo( VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags flags_,
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & initialData_,
|
|
const void * pNext_ = nullptr )
|
|
: pNext( pNext_ ), flags( flags_ ), initialDataSize( initialData_.size() * sizeof( T ) ), pInitialData( initialData_.data() )
|
|
{
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
PipelineCacheCreateInfo & operator=( PipelineCacheCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PipelineCacheCreateInfo & operator=( VkPipelineCacheCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineCacheCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineCacheCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineCacheCreateInfo & setInitialDataSize( size_t initialDataSize_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
initialDataSize = initialDataSize_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineCacheCreateInfo & setPInitialData( const void * pInitialData_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pInitialData = pInitialData_;
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
template <typename T>
|
|
PipelineCacheCreateInfo & setInitialData( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & initialData_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
initialDataSize = initialData_.size() * sizeof( T );
|
|
pInitialData = initialData_.data();
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkPipelineCacheCreateInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPipelineCacheCreateInfo *>( this );
|
|
}
|
|
|
|
operator VkPipelineCacheCreateInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPipelineCacheCreateInfo *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
const void * const &,
|
|
VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags const &,
|
|
size_t const &,
|
|
const void * const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags, initialDataSize, pInitialData );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( PipelineCacheCreateInfo const & ) const = default;
|
|
#else
|
|
bool operator==( PipelineCacheCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( initialDataSize == rhs.initialDataSize ) &&
|
|
( pInitialData == rhs.pInitialData );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( PipelineCacheCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCacheCreateInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags flags = {};
|
|
size_t initialDataSize = {};
|
|
const void * pInitialData = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePipelineCacheCreateInfo>
|
|
{
|
|
using Type = PipelineCacheCreateInfo;
};
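
  // Illustrative usage sketch (not part of the generated API): a pipeline cache is commonly
  // re-created from a blob saved by a previous run; the ArrayProxy constructor above fills
  // initialDataSize / pInitialData from a container.  How cacheBlob is obtained is an assumption
  // for illustration.
  //
  //   std::vector<uint8_t> cacheBlob;  // e.g. read back from disk
  //   VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo cacheInfo( {}, cacheBlob );
  //   VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache = device.createPipelineCache( cacheInfo );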
struct PipelinePoolSize
|
|
{
|
|
using NativeType = VkPipelinePoolSize;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelinePoolSize;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR
|
|
PipelinePoolSize( VULKAN_HPP_NAMESPACE::DeviceSize poolEntrySize_ = {}, uint32_t poolEntryCount_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, poolEntrySize( poolEntrySize_ )
|
|
, poolEntryCount( poolEntryCount_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR PipelinePoolSize( PipelinePoolSize const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PipelinePoolSize( VkPipelinePoolSize const & rhs ) VULKAN_HPP_NOEXCEPT : PipelinePoolSize( *reinterpret_cast<PipelinePoolSize const *>( &rhs ) ) {}
|
|
|
|
PipelinePoolSize & operator=( PipelinePoolSize const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PipelinePoolSize & operator=( VkPipelinePoolSize const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelinePoolSize const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PipelinePoolSize & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelinePoolSize & setPoolEntrySize( VULKAN_HPP_NAMESPACE::DeviceSize poolEntrySize_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
poolEntrySize = poolEntrySize_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelinePoolSize & setPoolEntryCount( uint32_t poolEntryCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
poolEntryCount = poolEntryCount_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkPipelinePoolSize const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPipelinePoolSize *>( this );
|
|
}
|
|
|
|
operator VkPipelinePoolSize &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPipelinePoolSize *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, uint32_t const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, poolEntrySize, poolEntryCount );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( PipelinePoolSize const & ) const = default;
|
|
#else
|
|
bool operator==( PipelinePoolSize const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( poolEntrySize == rhs.poolEntrySize ) && ( poolEntryCount == rhs.poolEntryCount );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( PipelinePoolSize const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelinePoolSize;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize poolEntrySize = {};
|
|
uint32_t poolEntryCount = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePipelinePoolSize>
|
|
{
|
|
using Type = PipelinePoolSize;
|
|
};
|
|
|
|
struct DeviceObjectReservationCreateInfo
|
|
{
|
|
using NativeType = VkDeviceObjectReservationCreateInfo;
|
|
|
|
static const bool allowDuplicate = true;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceObjectReservationCreateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DeviceObjectReservationCreateInfo( uint32_t pipelineCacheCreateInfoCount_ = {},
|
|
const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo * pPipelineCacheCreateInfos_ = {},
|
|
uint32_t pipelinePoolSizeCount_ = {},
|
|
const VULKAN_HPP_NAMESPACE::PipelinePoolSize * pPipelinePoolSizes_ = {},
|
|
uint32_t semaphoreRequestCount_ = {},
|
|
uint32_t commandBufferRequestCount_ = {},
|
|
uint32_t fenceRequestCount_ = {},
|
|
uint32_t deviceMemoryRequestCount_ = {},
|
|
uint32_t bufferRequestCount_ = {},
|
|
uint32_t imageRequestCount_ = {},
|
|
uint32_t eventRequestCount_ = {},
|
|
uint32_t queryPoolRequestCount_ = {},
|
|
uint32_t bufferViewRequestCount_ = {},
|
|
uint32_t imageViewRequestCount_ = {},
|
|
uint32_t layeredImageViewRequestCount_ = {},
|
|
uint32_t pipelineCacheRequestCount_ = {},
|
|
uint32_t pipelineLayoutRequestCount_ = {},
|
|
uint32_t renderPassRequestCount_ = {},
|
|
uint32_t graphicsPipelineRequestCount_ = {},
|
|
uint32_t computePipelineRequestCount_ = {},
|
|
uint32_t descriptorSetLayoutRequestCount_ = {},
|
|
uint32_t samplerRequestCount_ = {},
|
|
uint32_t descriptorPoolRequestCount_ = {},
|
|
uint32_t descriptorSetRequestCount_ = {},
|
|
uint32_t framebufferRequestCount_ = {},
|
|
uint32_t commandPoolRequestCount_ = {},
|
|
uint32_t samplerYcbcrConversionRequestCount_ = {},
|
|
uint32_t surfaceRequestCount_ = {},
|
|
uint32_t swapchainRequestCount_ = {},
|
|
uint32_t displayModeRequestCount_ = {},
|
|
uint32_t subpassDescriptionRequestCount_ = {},
|
|
uint32_t attachmentDescriptionRequestCount_ = {},
|
|
uint32_t descriptorSetLayoutBindingRequestCount_ = {},
|
|
uint32_t descriptorSetLayoutBindingLimit_ = {},
|
|
uint32_t maxImageViewMipLevels_ = {},
|
|
uint32_t maxImageViewArrayLayers_ = {},
|
|
uint32_t maxLayeredImageViewMipLevels_ = {},
|
|
uint32_t maxOcclusionQueriesPerPool_ = {},
|
|
uint32_t maxPipelineStatisticsQueriesPerPool_ = {},
|
|
uint32_t maxTimestampQueriesPerPool_ = {},
|
|
uint32_t maxImmutableSamplersPerDescriptorSetLayout_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, pipelineCacheCreateInfoCount( pipelineCacheCreateInfoCount_ )
|
|
, pPipelineCacheCreateInfos( pPipelineCacheCreateInfos_ )
|
|
, pipelinePoolSizeCount( pipelinePoolSizeCount_ )
|
|
, pPipelinePoolSizes( pPipelinePoolSizes_ )
|
|
, semaphoreRequestCount( semaphoreRequestCount_ )
|
|
, commandBufferRequestCount( commandBufferRequestCount_ )
|
|
, fenceRequestCount( fenceRequestCount_ )
|
|
, deviceMemoryRequestCount( deviceMemoryRequestCount_ )
|
|
, bufferRequestCount( bufferRequestCount_ )
|
|
, imageRequestCount( imageRequestCount_ )
|
|
, eventRequestCount( eventRequestCount_ )
|
|
, queryPoolRequestCount( queryPoolRequestCount_ )
|
|
, bufferViewRequestCount( bufferViewRequestCount_ )
|
|
, imageViewRequestCount( imageViewRequestCount_ )
|
|
, layeredImageViewRequestCount( layeredImageViewRequestCount_ )
|
|
, pipelineCacheRequestCount( pipelineCacheRequestCount_ )
|
|
, pipelineLayoutRequestCount( pipelineLayoutRequestCount_ )
|
|
, renderPassRequestCount( renderPassRequestCount_ )
|
|
, graphicsPipelineRequestCount( graphicsPipelineRequestCount_ )
|
|
, computePipelineRequestCount( computePipelineRequestCount_ )
|
|
, descriptorSetLayoutRequestCount( descriptorSetLayoutRequestCount_ )
|
|
, samplerRequestCount( samplerRequestCount_ )
|
|
, descriptorPoolRequestCount( descriptorPoolRequestCount_ )
|
|
, descriptorSetRequestCount( descriptorSetRequestCount_ )
|
|
, framebufferRequestCount( framebufferRequestCount_ )
|
|
, commandPoolRequestCount( commandPoolRequestCount_ )
|
|
, samplerYcbcrConversionRequestCount( samplerYcbcrConversionRequestCount_ )
|
|
, surfaceRequestCount( surfaceRequestCount_ )
|
|
, swapchainRequestCount( swapchainRequestCount_ )
|
|
, displayModeRequestCount( displayModeRequestCount_ )
|
|
, subpassDescriptionRequestCount( subpassDescriptionRequestCount_ )
|
|
, attachmentDescriptionRequestCount( attachmentDescriptionRequestCount_ )
|
|
, descriptorSetLayoutBindingRequestCount( descriptorSetLayoutBindingRequestCount_ )
|
|
, descriptorSetLayoutBindingLimit( descriptorSetLayoutBindingLimit_ )
|
|
, maxImageViewMipLevels( maxImageViewMipLevels_ )
|
|
, maxImageViewArrayLayers( maxImageViewArrayLayers_ )
|
|
, maxLayeredImageViewMipLevels( maxLayeredImageViewMipLevels_ )
|
|
, maxOcclusionQueriesPerPool( maxOcclusionQueriesPerPool_ )
|
|
, maxPipelineStatisticsQueriesPerPool( maxPipelineStatisticsQueriesPerPool_ )
|
|
, maxTimestampQueriesPerPool( maxTimestampQueriesPerPool_ )
|
|
, maxImmutableSamplersPerDescriptorSetLayout( maxImmutableSamplersPerDescriptorSetLayout_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR DeviceObjectReservationCreateInfo( DeviceObjectReservationCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DeviceObjectReservationCreateInfo( VkDeviceObjectReservationCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DeviceObjectReservationCreateInfo( *reinterpret_cast<DeviceObjectReservationCreateInfo const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
DeviceObjectReservationCreateInfo(
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo> const & pipelineCacheCreateInfos_,
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelinePoolSize> const & pipelinePoolSizes_ = {},
|
|
uint32_t semaphoreRequestCount_ = {},
|
|
uint32_t commandBufferRequestCount_ = {},
|
|
uint32_t fenceRequestCount_ = {},
|
|
uint32_t deviceMemoryRequestCount_ = {},
|
|
uint32_t bufferRequestCount_ = {},
|
|
uint32_t imageRequestCount_ = {},
|
|
uint32_t eventRequestCount_ = {},
|
|
uint32_t queryPoolRequestCount_ = {},
|
|
uint32_t bufferViewRequestCount_ = {},
|
|
uint32_t imageViewRequestCount_ = {},
|
|
uint32_t layeredImageViewRequestCount_ = {},
|
|
uint32_t pipelineCacheRequestCount_ = {},
|
|
uint32_t pipelineLayoutRequestCount_ = {},
|
|
uint32_t renderPassRequestCount_ = {},
|
|
uint32_t graphicsPipelineRequestCount_ = {},
|
|
uint32_t computePipelineRequestCount_ = {},
|
|
uint32_t descriptorSetLayoutRequestCount_ = {},
|
|
uint32_t samplerRequestCount_ = {},
|
|
uint32_t descriptorPoolRequestCount_ = {},
|
|
uint32_t descriptorSetRequestCount_ = {},
|
|
uint32_t framebufferRequestCount_ = {},
|
|
uint32_t commandPoolRequestCount_ = {},
|
|
uint32_t samplerYcbcrConversionRequestCount_ = {},
|
|
uint32_t surfaceRequestCount_ = {},
|
|
uint32_t swapchainRequestCount_ = {},
|
|
uint32_t displayModeRequestCount_ = {},
|
|
uint32_t subpassDescriptionRequestCount_ = {},
|
|
uint32_t attachmentDescriptionRequestCount_ = {},
|
|
uint32_t descriptorSetLayoutBindingRequestCount_ = {},
|
|
uint32_t descriptorSetLayoutBindingLimit_ = {},
|
|
uint32_t maxImageViewMipLevels_ = {},
|
|
uint32_t maxImageViewArrayLayers_ = {},
|
|
uint32_t maxLayeredImageViewMipLevels_ = {},
|
|
uint32_t maxOcclusionQueriesPerPool_ = {},
|
|
uint32_t maxPipelineStatisticsQueriesPerPool_ = {},
|
|
uint32_t maxTimestampQueriesPerPool_ = {},
|
|
uint32_t maxImmutableSamplersPerDescriptorSetLayout_ = {},
|
|
const void * pNext_ = nullptr )
|
|
: pNext( pNext_ )
|
|
, pipelineCacheCreateInfoCount( static_cast<uint32_t>( pipelineCacheCreateInfos_.size() ) )
|
|
, pPipelineCacheCreateInfos( pipelineCacheCreateInfos_.data() )
|
|
, pipelinePoolSizeCount( static_cast<uint32_t>( pipelinePoolSizes_.size() ) )
|
|
, pPipelinePoolSizes( pipelinePoolSizes_.data() )
|
|
, semaphoreRequestCount( semaphoreRequestCount_ )
|
|
, commandBufferRequestCount( commandBufferRequestCount_ )
|
|
, fenceRequestCount( fenceRequestCount_ )
|
|
, deviceMemoryRequestCount( deviceMemoryRequestCount_ )
|
|
, bufferRequestCount( bufferRequestCount_ )
|
|
, imageRequestCount( imageRequestCount_ )
|
|
, eventRequestCount( eventRequestCount_ )
|
|
, queryPoolRequestCount( queryPoolRequestCount_ )
|
|
, bufferViewRequestCount( bufferViewRequestCount_ )
|
|
, imageViewRequestCount( imageViewRequestCount_ )
|
|
, layeredImageViewRequestCount( layeredImageViewRequestCount_ )
|
|
, pipelineCacheRequestCount( pipelineCacheRequestCount_ )
|
|
, pipelineLayoutRequestCount( pipelineLayoutRequestCount_ )
|
|
, renderPassRequestCount( renderPassRequestCount_ )
|
|
, graphicsPipelineRequestCount( graphicsPipelineRequestCount_ )
|
|
, computePipelineRequestCount( computePipelineRequestCount_ )
|
|
, descriptorSetLayoutRequestCount( descriptorSetLayoutRequestCount_ )
|
|
, samplerRequestCount( samplerRequestCount_ )
|
|
, descriptorPoolRequestCount( descriptorPoolRequestCount_ )
|
|
, descriptorSetRequestCount( descriptorSetRequestCount_ )
|
|
, framebufferRequestCount( framebufferRequestCount_ )
|
|
, commandPoolRequestCount( commandPoolRequestCount_ )
|
|
, samplerYcbcrConversionRequestCount( samplerYcbcrConversionRequestCount_ )
|
|
, surfaceRequestCount( surfaceRequestCount_ )
|
|
, swapchainRequestCount( swapchainRequestCount_ )
|
|
, displayModeRequestCount( displayModeRequestCount_ )
|
|
, subpassDescriptionRequestCount( subpassDescriptionRequestCount_ )
|
|
, attachmentDescriptionRequestCount( attachmentDescriptionRequestCount_ )
|
|
, descriptorSetLayoutBindingRequestCount( descriptorSetLayoutBindingRequestCount_ )
|
|
, descriptorSetLayoutBindingLimit( descriptorSetLayoutBindingLimit_ )
|
|
, maxImageViewMipLevels( maxImageViewMipLevels_ )
|
|
, maxImageViewArrayLayers( maxImageViewArrayLayers_ )
|
|
, maxLayeredImageViewMipLevels( maxLayeredImageViewMipLevels_ )
|
|
, maxOcclusionQueriesPerPool( maxOcclusionQueriesPerPool_ )
|
|
, maxPipelineStatisticsQueriesPerPool( maxPipelineStatisticsQueriesPerPool_ )
|
|
, maxTimestampQueriesPerPool( maxTimestampQueriesPerPool_ )
|
|
, maxImmutableSamplersPerDescriptorSetLayout( maxImmutableSamplersPerDescriptorSetLayout_ )
|
|
{
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
DeviceObjectReservationCreateInfo & operator=( DeviceObjectReservationCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DeviceObjectReservationCreateInfo & operator=( VkDeviceObjectReservationCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceObjectReservationCreateInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceObjectReservationCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceObjectReservationCreateInfo & setPipelineCacheCreateInfoCount( uint32_t pipelineCacheCreateInfoCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pipelineCacheCreateInfoCount = pipelineCacheCreateInfoCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceObjectReservationCreateInfo &
|
|
setPPipelineCacheCreateInfos( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo * pPipelineCacheCreateInfos_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pPipelineCacheCreateInfos = pPipelineCacheCreateInfos_;
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
DeviceObjectReservationCreateInfo & setPipelineCacheCreateInfos(
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo> const & pipelineCacheCreateInfos_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pipelineCacheCreateInfoCount = static_cast<uint32_t>( pipelineCacheCreateInfos_.size() );
|
|
pPipelineCacheCreateInfos = pipelineCacheCreateInfos_.data();
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceObjectReservationCreateInfo & setPipelinePoolSizeCount( uint32_t pipelinePoolSizeCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pipelinePoolSizeCount = pipelinePoolSizeCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceObjectReservationCreateInfo &
|
|
setPPipelinePoolSizes( const VULKAN_HPP_NAMESPACE::PipelinePoolSize * pPipelinePoolSizes_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pPipelinePoolSizes = pPipelinePoolSizes_;
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
DeviceObjectReservationCreateInfo & setPipelinePoolSizes(
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelinePoolSize> const & pipelinePoolSizes_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pipelinePoolSizeCount = static_cast<uint32_t>( pipelinePoolSizes_.size() );
|
|
pPipelinePoolSizes = pipelinePoolSizes_.data();
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceObjectReservationCreateInfo & setSemaphoreRequestCount( uint32_t semaphoreRequestCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
semaphoreRequestCount = semaphoreRequestCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceObjectReservationCreateInfo & setCommandBufferRequestCount( uint32_t commandBufferRequestCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
commandBufferRequestCount = commandBufferRequestCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceObjectReservationCreateInfo & setFenceRequestCount( uint32_t fenceRequestCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
fenceRequestCount = fenceRequestCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceObjectReservationCreateInfo & setDeviceMemoryRequestCount( uint32_t deviceMemoryRequestCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
deviceMemoryRequestCount = deviceMemoryRequestCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceObjectReservationCreateInfo & setBufferRequestCount( uint32_t bufferRequestCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
bufferRequestCount = bufferRequestCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceObjectReservationCreateInfo & setImageRequestCount( uint32_t imageRequestCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
imageRequestCount = imageRequestCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceObjectReservationCreateInfo & setEventRequestCount( uint32_t eventRequestCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
eventRequestCount = eventRequestCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceObjectReservationCreateInfo & setQueryPoolRequestCount( uint32_t queryPoolRequestCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
queryPoolRequestCount = queryPoolRequestCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceObjectReservationCreateInfo & setBufferViewRequestCount( uint32_t bufferViewRequestCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
bufferViewRequestCount = bufferViewRequestCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceObjectReservationCreateInfo & setImageViewRequestCount( uint32_t imageViewRequestCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
imageViewRequestCount = imageViewRequestCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceObjectReservationCreateInfo & setLayeredImageViewRequestCount( uint32_t layeredImageViewRequestCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
layeredImageViewRequestCount = layeredImageViewRequestCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceObjectReservationCreateInfo & setPipelineCacheRequestCount( uint32_t pipelineCacheRequestCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pipelineCacheRequestCount = pipelineCacheRequestCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceObjectReservationCreateInfo & setPipelineLayoutRequestCount( uint32_t pipelineLayoutRequestCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pipelineLayoutRequestCount = pipelineLayoutRequestCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceObjectReservationCreateInfo & setRenderPassRequestCount( uint32_t renderPassRequestCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
renderPassRequestCount = renderPassRequestCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceObjectReservationCreateInfo & setGraphicsPipelineRequestCount( uint32_t graphicsPipelineRequestCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
graphicsPipelineRequestCount = graphicsPipelineRequestCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceObjectReservationCreateInfo & setComputePipelineRequestCount( uint32_t computePipelineRequestCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
computePipelineRequestCount = computePipelineRequestCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceObjectReservationCreateInfo &
|
|
setDescriptorSetLayoutRequestCount( uint32_t descriptorSetLayoutRequestCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
descriptorSetLayoutRequestCount = descriptorSetLayoutRequestCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceObjectReservationCreateInfo & setSamplerRequestCount( uint32_t samplerRequestCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
samplerRequestCount = samplerRequestCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceObjectReservationCreateInfo & setDescriptorPoolRequestCount( uint32_t descriptorPoolRequestCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
descriptorPoolRequestCount = descriptorPoolRequestCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceObjectReservationCreateInfo & setDescriptorSetRequestCount( uint32_t descriptorSetRequestCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
descriptorSetRequestCount = descriptorSetRequestCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceObjectReservationCreateInfo & setFramebufferRequestCount( uint32_t framebufferRequestCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
framebufferRequestCount = framebufferRequestCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceObjectReservationCreateInfo & setCommandPoolRequestCount( uint32_t commandPoolRequestCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
commandPoolRequestCount = commandPoolRequestCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceObjectReservationCreateInfo &
|
|
setSamplerYcbcrConversionRequestCount( uint32_t samplerYcbcrConversionRequestCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
samplerYcbcrConversionRequestCount = samplerYcbcrConversionRequestCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceObjectReservationCreateInfo & setSurfaceRequestCount( uint32_t surfaceRequestCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
surfaceRequestCount = surfaceRequestCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceObjectReservationCreateInfo & setSwapchainRequestCount( uint32_t swapchainRequestCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
swapchainRequestCount = swapchainRequestCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceObjectReservationCreateInfo & setDisplayModeRequestCount( uint32_t displayModeRequestCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
displayModeRequestCount = displayModeRequestCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceObjectReservationCreateInfo &
|
|
setSubpassDescriptionRequestCount( uint32_t subpassDescriptionRequestCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
subpassDescriptionRequestCount = subpassDescriptionRequestCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceObjectReservationCreateInfo &
|
|
setAttachmentDescriptionRequestCount( uint32_t attachmentDescriptionRequestCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
attachmentDescriptionRequestCount = attachmentDescriptionRequestCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceObjectReservationCreateInfo &
|
|
setDescriptorSetLayoutBindingRequestCount( uint32_t descriptorSetLayoutBindingRequestCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
descriptorSetLayoutBindingRequestCount = descriptorSetLayoutBindingRequestCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceObjectReservationCreateInfo &
|
|
setDescriptorSetLayoutBindingLimit( uint32_t descriptorSetLayoutBindingLimit_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
descriptorSetLayoutBindingLimit = descriptorSetLayoutBindingLimit_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceObjectReservationCreateInfo & setMaxImageViewMipLevels( uint32_t maxImageViewMipLevels_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
maxImageViewMipLevels = maxImageViewMipLevels_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceObjectReservationCreateInfo & setMaxImageViewArrayLayers( uint32_t maxImageViewArrayLayers_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
maxImageViewArrayLayers = maxImageViewArrayLayers_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceObjectReservationCreateInfo & setMaxLayeredImageViewMipLevels( uint32_t maxLayeredImageViewMipLevels_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
maxLayeredImageViewMipLevels = maxLayeredImageViewMipLevels_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceObjectReservationCreateInfo & setMaxOcclusionQueriesPerPool( uint32_t maxOcclusionQueriesPerPool_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
maxOcclusionQueriesPerPool = maxOcclusionQueriesPerPool_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceObjectReservationCreateInfo &
|
|
setMaxPipelineStatisticsQueriesPerPool( uint32_t maxPipelineStatisticsQueriesPerPool_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
maxPipelineStatisticsQueriesPerPool = maxPipelineStatisticsQueriesPerPool_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceObjectReservationCreateInfo & setMaxTimestampQueriesPerPool( uint32_t maxTimestampQueriesPerPool_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
maxTimestampQueriesPerPool = maxTimestampQueriesPerPool_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceObjectReservationCreateInfo &
|
|
setMaxImmutableSamplersPerDescriptorSetLayout( uint32_t maxImmutableSamplersPerDescriptorSetLayout_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
maxImmutableSamplersPerDescriptorSetLayout = maxImmutableSamplersPerDescriptorSetLayout_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkDeviceObjectReservationCreateInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDeviceObjectReservationCreateInfo *>( this );
|
|
}
|
|
|
|
operator VkDeviceObjectReservationCreateInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDeviceObjectReservationCreateInfo *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
const void * const &,
|
|
uint32_t const &,
|
|
const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo * const &,
|
|
uint32_t const &,
|
|
const VULKAN_HPP_NAMESPACE::PipelinePoolSize * const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType,
|
|
pNext,
|
|
pipelineCacheCreateInfoCount,
|
|
pPipelineCacheCreateInfos,
|
|
pipelinePoolSizeCount,
|
|
pPipelinePoolSizes,
|
|
semaphoreRequestCount,
|
|
commandBufferRequestCount,
|
|
fenceRequestCount,
|
|
deviceMemoryRequestCount,
|
|
bufferRequestCount,
|
|
imageRequestCount,
|
|
eventRequestCount,
|
|
queryPoolRequestCount,
|
|
bufferViewRequestCount,
|
|
imageViewRequestCount,
|
|
layeredImageViewRequestCount,
|
|
pipelineCacheRequestCount,
|
|
pipelineLayoutRequestCount,
|
|
renderPassRequestCount,
|
|
graphicsPipelineRequestCount,
|
|
computePipelineRequestCount,
|
|
descriptorSetLayoutRequestCount,
|
|
samplerRequestCount,
|
|
descriptorPoolRequestCount,
|
|
descriptorSetRequestCount,
|
|
framebufferRequestCount,
|
|
commandPoolRequestCount,
|
|
samplerYcbcrConversionRequestCount,
|
|
surfaceRequestCount,
|
|
swapchainRequestCount,
|
|
displayModeRequestCount,
|
|
subpassDescriptionRequestCount,
|
|
attachmentDescriptionRequestCount,
|
|
descriptorSetLayoutBindingRequestCount,
|
|
descriptorSetLayoutBindingLimit,
|
|
maxImageViewMipLevels,
|
|
maxImageViewArrayLayers,
|
|
maxLayeredImageViewMipLevels,
|
|
maxOcclusionQueriesPerPool,
|
|
maxPipelineStatisticsQueriesPerPool,
|
|
maxTimestampQueriesPerPool,
|
|
maxImmutableSamplersPerDescriptorSetLayout );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( DeviceObjectReservationCreateInfo const & ) const = default;
|
|
#else
|
|
bool operator==( DeviceObjectReservationCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineCacheCreateInfoCount == rhs.pipelineCacheCreateInfoCount ) &&
|
|
( pPipelineCacheCreateInfos == rhs.pPipelineCacheCreateInfos ) && ( pipelinePoolSizeCount == rhs.pipelinePoolSizeCount ) &&
|
|
( pPipelinePoolSizes == rhs.pPipelinePoolSizes ) && ( semaphoreRequestCount == rhs.semaphoreRequestCount ) &&
|
|
( commandBufferRequestCount == rhs.commandBufferRequestCount ) && ( fenceRequestCount == rhs.fenceRequestCount ) &&
|
|
( deviceMemoryRequestCount == rhs.deviceMemoryRequestCount ) && ( bufferRequestCount == rhs.bufferRequestCount ) &&
|
|
( imageRequestCount == rhs.imageRequestCount ) && ( eventRequestCount == rhs.eventRequestCount ) &&
|
|
( queryPoolRequestCount == rhs.queryPoolRequestCount ) && ( bufferViewRequestCount == rhs.bufferViewRequestCount ) &&
|
|
( imageViewRequestCount == rhs.imageViewRequestCount ) && ( layeredImageViewRequestCount == rhs.layeredImageViewRequestCount ) &&
|
|
( pipelineCacheRequestCount == rhs.pipelineCacheRequestCount ) && ( pipelineLayoutRequestCount == rhs.pipelineLayoutRequestCount ) &&
|
|
( renderPassRequestCount == rhs.renderPassRequestCount ) && ( graphicsPipelineRequestCount == rhs.graphicsPipelineRequestCount ) &&
|
|
( computePipelineRequestCount == rhs.computePipelineRequestCount ) && ( descriptorSetLayoutRequestCount == rhs.descriptorSetLayoutRequestCount ) &&
|
|
( samplerRequestCount == rhs.samplerRequestCount ) && ( descriptorPoolRequestCount == rhs.descriptorPoolRequestCount ) &&
|
|
( descriptorSetRequestCount == rhs.descriptorSetRequestCount ) && ( framebufferRequestCount == rhs.framebufferRequestCount ) &&
|
|
( commandPoolRequestCount == rhs.commandPoolRequestCount ) && ( samplerYcbcrConversionRequestCount == rhs.samplerYcbcrConversionRequestCount ) &&
|
|
( surfaceRequestCount == rhs.surfaceRequestCount ) && ( swapchainRequestCount == rhs.swapchainRequestCount ) &&
|
|
( displayModeRequestCount == rhs.displayModeRequestCount ) && ( subpassDescriptionRequestCount == rhs.subpassDescriptionRequestCount ) &&
|
|
( attachmentDescriptionRequestCount == rhs.attachmentDescriptionRequestCount ) &&
|
|
( descriptorSetLayoutBindingRequestCount == rhs.descriptorSetLayoutBindingRequestCount ) &&
|
|
( descriptorSetLayoutBindingLimit == rhs.descriptorSetLayoutBindingLimit ) && ( maxImageViewMipLevels == rhs.maxImageViewMipLevels ) &&
|
|
( maxImageViewArrayLayers == rhs.maxImageViewArrayLayers ) && ( maxLayeredImageViewMipLevels == rhs.maxLayeredImageViewMipLevels ) &&
|
|
( maxOcclusionQueriesPerPool == rhs.maxOcclusionQueriesPerPool ) &&
|
|
( maxPipelineStatisticsQueriesPerPool == rhs.maxPipelineStatisticsQueriesPerPool ) &&
|
|
( maxTimestampQueriesPerPool == rhs.maxTimestampQueriesPerPool ) &&
|
|
( maxImmutableSamplersPerDescriptorSetLayout == rhs.maxImmutableSamplersPerDescriptorSetLayout );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( DeviceObjectReservationCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceObjectReservationCreateInfo;
|
|
const void * pNext = {};
|
|
uint32_t pipelineCacheCreateInfoCount = {};
|
|
const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo * pPipelineCacheCreateInfos = {};
|
|
uint32_t pipelinePoolSizeCount = {};
|
|
const VULKAN_HPP_NAMESPACE::PipelinePoolSize * pPipelinePoolSizes = {};
|
|
uint32_t semaphoreRequestCount = {};
|
|
uint32_t commandBufferRequestCount = {};
|
|
uint32_t fenceRequestCount = {};
|
|
uint32_t deviceMemoryRequestCount = {};
|
|
uint32_t bufferRequestCount = {};
|
|
uint32_t imageRequestCount = {};
|
|
uint32_t eventRequestCount = {};
|
|
uint32_t queryPoolRequestCount = {};
|
|
uint32_t bufferViewRequestCount = {};
|
|
uint32_t imageViewRequestCount = {};
|
|
uint32_t layeredImageViewRequestCount = {};
|
|
uint32_t pipelineCacheRequestCount = {};
|
|
uint32_t pipelineLayoutRequestCount = {};
|
|
uint32_t renderPassRequestCount = {};
|
|
uint32_t graphicsPipelineRequestCount = {};
|
|
uint32_t computePipelineRequestCount = {};
|
|
uint32_t descriptorSetLayoutRequestCount = {};
|
|
uint32_t samplerRequestCount = {};
|
|
uint32_t descriptorPoolRequestCount = {};
|
|
uint32_t descriptorSetRequestCount = {};
|
|
uint32_t framebufferRequestCount = {};
|
|
uint32_t commandPoolRequestCount = {};
|
|
uint32_t samplerYcbcrConversionRequestCount = {};
|
|
uint32_t surfaceRequestCount = {};
|
|
uint32_t swapchainRequestCount = {};
|
|
uint32_t displayModeRequestCount = {};
|
|
uint32_t subpassDescriptionRequestCount = {};
|
|
uint32_t attachmentDescriptionRequestCount = {};
|
|
uint32_t descriptorSetLayoutBindingRequestCount = {};
|
|
uint32_t descriptorSetLayoutBindingLimit = {};
|
|
uint32_t maxImageViewMipLevels = {};
|
|
uint32_t maxImageViewArrayLayers = {};
|
|
uint32_t maxLayeredImageViewMipLevels = {};
|
|
uint32_t maxOcclusionQueriesPerPool = {};
|
|
uint32_t maxPipelineStatisticsQueriesPerPool = {};
|
|
uint32_t maxTimestampQueriesPerPool = {};
|
|
uint32_t maxImmutableSamplersPerDescriptorSetLayout = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eDeviceObjectReservationCreateInfo>
|
|
{
|
|
using Type = DeviceObjectReservationCreateInfo;
|
|
};
  struct DevicePrivateDataCreateInfo
  {
    using NativeType = VkDevicePrivateDataCreateInfo;

    static const bool                    allowDuplicate = true;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDevicePrivateDataCreateInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR DevicePrivateDataCreateInfo( uint32_t privateDataSlotRequestCount_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , privateDataSlotRequestCount( privateDataSlotRequestCount_ )
    {
    }

    VULKAN_HPP_CONSTEXPR DevicePrivateDataCreateInfo( DevicePrivateDataCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DevicePrivateDataCreateInfo( VkDevicePrivateDataCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : DevicePrivateDataCreateInfo( *reinterpret_cast<DevicePrivateDataCreateInfo const *>( &rhs ) )
    {
    }

    DevicePrivateDataCreateInfo & operator=( DevicePrivateDataCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    DevicePrivateDataCreateInfo & operator=( VkDevicePrivateDataCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DevicePrivateDataCreateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 DevicePrivateDataCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DevicePrivateDataCreateInfo & setPrivateDataSlotRequestCount( uint32_t privateDataSlotRequestCount_ ) VULKAN_HPP_NOEXCEPT
    {
      privateDataSlotRequestCount = privateDataSlotRequestCount_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkDevicePrivateDataCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDevicePrivateDataCreateInfo *>( this );
    }

    operator VkDevicePrivateDataCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDevicePrivateDataCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#  if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#  else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &>
#  endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, privateDataSlotRequestCount );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DevicePrivateDataCreateInfo const & ) const = default;
#else
    bool operator==( DevicePrivateDataCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( privateDataSlotRequestCount == rhs.privateDataSlotRequestCount );
#  endif
    }

    bool operator!=( DevicePrivateDataCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType                       = StructureType::eDevicePrivateDataCreateInfo;
    const void *                        pNext                       = {};
    uint32_t                            privateDataSlotRequestCount = {};
  };

  template <>
  struct CppType<StructureType, StructureType::eDevicePrivateDataCreateInfo>
  {
    using Type = DevicePrivateDataCreateInfo;
  };

  using DevicePrivateDataCreateInfoEXT = DevicePrivateDataCreateInfo;
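  // Illustrative usage sketch (comment only, not part of the generated API): a
  // DevicePrivateDataCreateInfo is chained into DeviceCreateInfo::pNext to reserve private-data
  // slots at device creation; the slot count here is an arbitrary example value.
  //
  //   vk::DevicePrivateDataCreateInfo privateDataInfo{ /*privateDataSlotRequestCount*/ 4 };
  //   vk::DeviceCreateInfo            deviceCreateInfo{};
  //   deviceCreateInfo.setPNext( &privateDataInfo );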
struct DeviceQueueInfo2
|
|
{
|
|
using NativeType = VkDeviceQueueInfo2;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceQueueInfo2;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DeviceQueueInfo2( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ = {},
|
|
uint32_t queueFamilyIndex_ = {},
|
|
uint32_t queueIndex_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, flags( flags_ )
|
|
, queueFamilyIndex( queueFamilyIndex_ )
|
|
, queueIndex( queueIndex_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR DeviceQueueInfo2( DeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DeviceQueueInfo2( VkDeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT : DeviceQueueInfo2( *reinterpret_cast<DeviceQueueInfo2 const *>( &rhs ) ) {}
|
|
|
|
DeviceQueueInfo2 & operator=( DeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DeviceQueueInfo2 & operator=( VkDeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceQueueInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceQueueInfo2 & setFlags( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceQueueInfo2 & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
queueFamilyIndex = queueFamilyIndex_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceQueueInfo2 & setQueueIndex( uint32_t queueIndex_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
queueIndex = queueIndex_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkDeviceQueueInfo2 const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDeviceQueueInfo2 *>( this );
|
|
}
|
|
|
|
operator VkDeviceQueueInfo2 &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDeviceQueueInfo2 *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
const void * const &,
|
|
VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags const &,
|
|
uint32_t const &,
|
|
uint32_t const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags, queueFamilyIndex, queueIndex );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( DeviceQueueInfo2 const & ) const = default;
|
|
#else
|
|
bool operator==( DeviceQueueInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( queueFamilyIndex == rhs.queueFamilyIndex ) &&
|
|
( queueIndex == rhs.queueIndex );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( DeviceQueueInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceQueueInfo2;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags = {};
|
|
uint32_t queueFamilyIndex = {};
|
|
uint32_t queueIndex = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eDeviceQueueInfo2>
|
|
{
|
|
using Type = DeviceQueueInfo2;
|
|
};
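  // Illustrative usage sketch (comment only, not part of the generated API): DeviceQueueInfo2
  // identifies a queue by create flags, family index, and queue index for Device::getQueue2;
  // the flags must match those used when the queue was created.
  //
  //   vk::DeviceQueueInfo2 queueInfo{ {}, /*queueFamilyIndex*/ 0, /*queueIndex*/ 0 };
  //   vk::Queue            queue = device.getQueue2( queueInfo );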
|
|
|
|
#if defined( VK_USE_PLATFORM_SCI )
|
|
struct DeviceSemaphoreSciSyncPoolReservationCreateInfoNV
|
|
{
|
|
using NativeType = VkDeviceSemaphoreSciSyncPoolReservationCreateInfoNV;
|
|
|
|
static const bool allowDuplicate = true;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceSemaphoreSciSyncPoolReservationCreateInfoNV;
|
|
|
|
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DeviceSemaphoreSciSyncPoolReservationCreateInfoNV( uint32_t semaphoreSciSyncPoolRequestCount_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, semaphoreSciSyncPoolRequestCount( semaphoreSciSyncPoolRequestCount_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR
|
|
DeviceSemaphoreSciSyncPoolReservationCreateInfoNV( DeviceSemaphoreSciSyncPoolReservationCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DeviceSemaphoreSciSyncPoolReservationCreateInfoNV( VkDeviceSemaphoreSciSyncPoolReservationCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DeviceSemaphoreSciSyncPoolReservationCreateInfoNV( *reinterpret_cast<DeviceSemaphoreSciSyncPoolReservationCreateInfoNV const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
DeviceSemaphoreSciSyncPoolReservationCreateInfoNV &
|
|
operator=( DeviceSemaphoreSciSyncPoolReservationCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DeviceSemaphoreSciSyncPoolReservationCreateInfoNV & operator=( VkDeviceSemaphoreSciSyncPoolReservationCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceSemaphoreSciSyncPoolReservationCreateInfoNV const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceSemaphoreSciSyncPoolReservationCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceSemaphoreSciSyncPoolReservationCreateInfoNV &
|
|
setSemaphoreSciSyncPoolRequestCount( uint32_t semaphoreSciSyncPoolRequestCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
semaphoreSciSyncPoolRequestCount = semaphoreSciSyncPoolRequestCount_;
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkDeviceSemaphoreSciSyncPoolReservationCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDeviceSemaphoreSciSyncPoolReservationCreateInfoNV *>( this );
|
|
}
|
|
|
|
operator VkDeviceSemaphoreSciSyncPoolReservationCreateInfoNV &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDeviceSemaphoreSciSyncPoolReservationCreateInfoNV *>( this );
|
|
}
|
|
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, semaphoreSciSyncPoolRequestCount );
|
|
}
|
|
# endif
|
|
|
|
# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( DeviceSemaphoreSciSyncPoolReservationCreateInfoNV const & ) const = default;
|
|
# else
|
|
bool operator==( DeviceSemaphoreSciSyncPoolReservationCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphoreSciSyncPoolRequestCount == rhs.semaphoreSciSyncPoolRequestCount );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( DeviceSemaphoreSciSyncPoolReservationCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
# endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceSemaphoreSciSyncPoolReservationCreateInfoNV;
|
|
const void * pNext = {};
|
|
uint32_t semaphoreSciSyncPoolRequestCount = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eDeviceSemaphoreSciSyncPoolReservationCreateInfoNV>
|
|
{
|
|
using Type = DeviceSemaphoreSciSyncPoolReservationCreateInfoNV;
|
|
};
|
|
#endif /*VK_USE_PLATFORM_SCI*/
  struct DispatchIndirectCommand
  {
    using NativeType = VkDispatchIndirectCommand;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR DispatchIndirectCommand( uint32_t x_ = {}, uint32_t y_ = {}, uint32_t z_ = {} ) VULKAN_HPP_NOEXCEPT
      : x( x_ )
      , y( y_ )
      , z( z_ )
    {
    }

    VULKAN_HPP_CONSTEXPR DispatchIndirectCommand( DispatchIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DispatchIndirectCommand( VkDispatchIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT
      : DispatchIndirectCommand( *reinterpret_cast<DispatchIndirectCommand const *>( &rhs ) )
    {
    }

    DispatchIndirectCommand & operator=( DispatchIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    DispatchIndirectCommand & operator=( VkDispatchIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DispatchIndirectCommand const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 DispatchIndirectCommand & setX( uint32_t x_ ) VULKAN_HPP_NOEXCEPT
    {
      x = x_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DispatchIndirectCommand & setY( uint32_t y_ ) VULKAN_HPP_NOEXCEPT
    {
      y = y_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DispatchIndirectCommand & setZ( uint32_t z_ ) VULKAN_HPP_NOEXCEPT
    {
      z = z_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkDispatchIndirectCommand const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDispatchIndirectCommand *>( this );
    }

    operator VkDispatchIndirectCommand &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDispatchIndirectCommand *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#  if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#  else
    std::tuple<uint32_t const &, uint32_t const &, uint32_t const &>
#  endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( x, y, z );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DispatchIndirectCommand const & ) const = default;
#else
    bool operator==( DispatchIndirectCommand const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( x == rhs.x ) && ( y == rhs.y ) && ( z == rhs.z );
#  endif
    }

    bool operator!=( DispatchIndirectCommand const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    uint32_t x = {};
    uint32_t y = {};
    uint32_t z = {};
  };
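  // Illustrative usage sketch (comment only, not part of the generated API): this struct matches
  // the layout read from a buffer by CommandBuffer::dispatchIndirect, so group counts written
  // through it can be consumed directly; mappedIndirectBuffer is an assumed host-visible mapping.
  //
  //   vk::DispatchIndirectCommand cmd{ /*x*/ 64, /*y*/ 1, /*z*/ 1 };
  //   std::memcpy( mappedIndirectBuffer, &cmd, sizeof( cmd ) );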
struct DisplayEventInfoEXT
|
|
{
|
|
using NativeType = VkDisplayEventInfoEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayEventInfoEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR
|
|
DisplayEventInfoEXT( VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT displayEvent_ = VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT::eFirstPixelOut,
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, displayEvent( displayEvent_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR DisplayEventInfoEXT( DisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DisplayEventInfoEXT( VkDisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : DisplayEventInfoEXT( *reinterpret_cast<DisplayEventInfoEXT const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
DisplayEventInfoEXT & operator=( DisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DisplayEventInfoEXT & operator=( VkDisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 DisplayEventInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DisplayEventInfoEXT & setDisplayEvent( VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT displayEvent_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
displayEvent = displayEvent_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkDisplayEventInfoEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDisplayEventInfoEXT *>( this );
|
|
}
|
|
|
|
operator VkDisplayEventInfoEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDisplayEventInfoEXT *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, displayEvent );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( DisplayEventInfoEXT const & ) const = default;
|
|
#else
|
|
bool operator==( DisplayEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( displayEvent == rhs.displayEvent );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( DisplayEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayEventInfoEXT;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT displayEvent = VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT::eFirstPixelOut;
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eDisplayEventInfoEXT>
|
|
{
|
|
using Type = DisplayEventInfoEXT;
|
|
};
  struct DisplayModeParametersKHR
  {
    using NativeType = VkDisplayModeParametersKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR DisplayModeParametersKHR( VULKAN_HPP_NAMESPACE::Extent2D visibleRegion_ = {}, uint32_t refreshRate_ = {} ) VULKAN_HPP_NOEXCEPT
      : visibleRegion( visibleRegion_ )
      , refreshRate( refreshRate_ )
    {
    }

    VULKAN_HPP_CONSTEXPR DisplayModeParametersKHR( DisplayModeParametersKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DisplayModeParametersKHR( VkDisplayModeParametersKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : DisplayModeParametersKHR( *reinterpret_cast<DisplayModeParametersKHR const *>( &rhs ) )
    {
    }

    DisplayModeParametersKHR & operator=( DisplayModeParametersKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    DisplayModeParametersKHR & operator=( VkDisplayModeParametersKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 DisplayModeParametersKHR & setVisibleRegion( VULKAN_HPP_NAMESPACE::Extent2D const & visibleRegion_ ) VULKAN_HPP_NOEXCEPT
    {
      visibleRegion = visibleRegion_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DisplayModeParametersKHR & setRefreshRate( uint32_t refreshRate_ ) VULKAN_HPP_NOEXCEPT
    {
      refreshRate = refreshRate_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkDisplayModeParametersKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDisplayModeParametersKHR *>( this );
    }

    operator VkDisplayModeParametersKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDisplayModeParametersKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#  if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#  else
    std::tuple<VULKAN_HPP_NAMESPACE::Extent2D const &, uint32_t const &>
#  endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( visibleRegion, refreshRate );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DisplayModeParametersKHR const & ) const = default;
#else
    bool operator==( DisplayModeParametersKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( visibleRegion == rhs.visibleRegion ) && ( refreshRate == rhs.refreshRate );
#  endif
    }

    bool operator!=( DisplayModeParametersKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::Extent2D visibleRegion = {};
    uint32_t                       refreshRate   = {};
  };
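  // Note (comment only, not part of the generated API): per the Vulkan specification, refreshRate
  // is expressed in units of 0.001 Hz, so a 60 Hz mode is written as 60000.
  //
  //   vk::DisplayModeParametersKHR parameters{ vk::Extent2D{ 1920, 1080 }, /*refreshRate*/ 60000 };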
struct DisplayModeCreateInfoKHR
|
|
{
|
|
using NativeType = VkDisplayModeCreateInfoKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayModeCreateInfoKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DisplayModeCreateInfoKHR( VULKAN_HPP_NAMESPACE::DisplayModeCreateFlagsKHR flags_ = {},
|
|
VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, flags( flags_ )
|
|
, parameters( parameters_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR DisplayModeCreateInfoKHR( DisplayModeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DisplayModeCreateInfoKHR( VkDisplayModeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DisplayModeCreateInfoKHR( *reinterpret_cast<DisplayModeCreateInfoKHR const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
DisplayModeCreateInfoKHR & operator=( DisplayModeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DisplayModeCreateInfoKHR & operator=( VkDisplayModeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 DisplayModeCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DisplayModeCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::DisplayModeCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DisplayModeCreateInfoKHR & setParameters( VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR const & parameters_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
parameters = parameters_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkDisplayModeCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDisplayModeCreateInfoKHR *>( this );
|
|
}
|
|
|
|
operator VkDisplayModeCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDisplayModeCreateInfoKHR *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
const void * const &,
|
|
VULKAN_HPP_NAMESPACE::DisplayModeCreateFlagsKHR const &,
|
|
VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags, parameters );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( DisplayModeCreateInfoKHR const & ) const = default;
|
|
#else
|
|
bool operator==( DisplayModeCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( parameters == rhs.parameters );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( DisplayModeCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayModeCreateInfoKHR;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::DisplayModeCreateFlagsKHR flags = {};
|
|
VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eDisplayModeCreateInfoKHR>
|
|
{
|
|
using Type = DisplayModeCreateInfoKHR;
|
|
};
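  // Illustrative usage sketch (comment only, not part of the generated API): a
  // DisplayModeCreateInfoKHR wraps DisplayModeParametersKHR when requesting a custom mode from a
  // display via PhysicalDevice::createDisplayModeKHR (VK_KHR_display).
  //
  //   vk::DisplayModeCreateInfoKHR modeCreateInfo{ {}, parameters };
  //   vk::DisplayModeKHR           mode = physicalDevice.createDisplayModeKHR( display, modeCreateInfo );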
  struct DisplayModePropertiesKHR
  {
    using NativeType = VkDisplayModePropertiesKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR DisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR           displayMode_ = {},
                                                   VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters_  = {} ) VULKAN_HPP_NOEXCEPT
      : displayMode( displayMode_ )
      , parameters( parameters_ )
    {
    }

    VULKAN_HPP_CONSTEXPR DisplayModePropertiesKHR( DisplayModePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DisplayModePropertiesKHR( VkDisplayModePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : DisplayModePropertiesKHR( *reinterpret_cast<DisplayModePropertiesKHR const *>( &rhs ) )
    {
    }

    DisplayModePropertiesKHR & operator=( DisplayModePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    DisplayModePropertiesKHR & operator=( VkDisplayModePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR const *>( &rhs );
      return *this;
    }

    operator VkDisplayModePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDisplayModePropertiesKHR *>( this );
    }

    operator VkDisplayModePropertiesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDisplayModePropertiesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#  if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#  else
    std::tuple<VULKAN_HPP_NAMESPACE::DisplayModeKHR const &, VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR const &>
#  endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( displayMode, parameters );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DisplayModePropertiesKHR const & ) const = default;
#else
    bool operator==( DisplayModePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( displayMode == rhs.displayMode ) && ( parameters == rhs.parameters );
#  endif
    }

    bool operator!=( DisplayModePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::DisplayModeKHR           displayMode = {};
    VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters  = {};
  };
|
|
|
|
  struct DisplayModeProperties2KHR
  {
    using NativeType = VkDisplayModeProperties2KHR;

    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayModeProperties2KHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR DisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR displayModeProperties_ = {},
                                                    void *                                         pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , displayModeProperties( displayModeProperties_ )
    {
    }

    VULKAN_HPP_CONSTEXPR DisplayModeProperties2KHR( DisplayModeProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DisplayModeProperties2KHR( VkDisplayModeProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : DisplayModeProperties2KHR( *reinterpret_cast<DisplayModeProperties2KHR const *>( &rhs ) )
    {
    }

    DisplayModeProperties2KHR & operator=( DisplayModeProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    DisplayModeProperties2KHR & operator=( VkDisplayModeProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR const *>( &rhs );
      return *this;
    }

    operator VkDisplayModeProperties2KHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDisplayModeProperties2KHR *>( this );
    }

    operator VkDisplayModeProperties2KHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDisplayModeProperties2KHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#  if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#  else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR const &>
#  endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, displayModeProperties );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DisplayModeProperties2KHR const & ) const = default;
#else
    bool operator==( DisplayModeProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( displayModeProperties == rhs.displayModeProperties );
#  endif
    }

    bool operator!=( DisplayModeProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType            sType                 = StructureType::eDisplayModeProperties2KHR;
    void *                                         pNext                 = {};
    VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR displayModeProperties = {};
  };

  template <>
  struct CppType<StructureType, StructureType::eDisplayModeProperties2KHR>
  {
    using Type = DisplayModeProperties2KHR;
  };

struct DisplayPlaneCapabilitiesKHR
|
|
{
|
|
using NativeType = VkDisplayPlaneCapabilitiesKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagsKHR supportedAlpha_ = {},
|
|
VULKAN_HPP_NAMESPACE::Offset2D minSrcPosition_ = {},
|
|
VULKAN_HPP_NAMESPACE::Offset2D maxSrcPosition_ = {},
|
|
VULKAN_HPP_NAMESPACE::Extent2D minSrcExtent_ = {},
|
|
VULKAN_HPP_NAMESPACE::Extent2D maxSrcExtent_ = {},
|
|
VULKAN_HPP_NAMESPACE::Offset2D minDstPosition_ = {},
|
|
VULKAN_HPP_NAMESPACE::Offset2D maxDstPosition_ = {},
|
|
VULKAN_HPP_NAMESPACE::Extent2D minDstExtent_ = {},
|
|
VULKAN_HPP_NAMESPACE::Extent2D maxDstExtent_ = {} ) VULKAN_HPP_NOEXCEPT
|
|
: supportedAlpha( supportedAlpha_ )
|
|
, minSrcPosition( minSrcPosition_ )
|
|
, maxSrcPosition( maxSrcPosition_ )
|
|
, minSrcExtent( minSrcExtent_ )
|
|
, maxSrcExtent( maxSrcExtent_ )
|
|
, minDstPosition( minDstPosition_ )
|
|
, maxDstPosition( maxDstPosition_ )
|
|
, minDstExtent( minDstExtent_ )
|
|
, maxDstExtent( maxDstExtent_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR DisplayPlaneCapabilitiesKHR( DisplayPlaneCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DisplayPlaneCapabilitiesKHR( VkDisplayPlaneCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DisplayPlaneCapabilitiesKHR( *reinterpret_cast<DisplayPlaneCapabilitiesKHR const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
DisplayPlaneCapabilitiesKHR & operator=( DisplayPlaneCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DisplayPlaneCapabilitiesKHR & operator=( VkDisplayPlaneCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
operator VkDisplayPlaneCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDisplayPlaneCapabilitiesKHR *>( this );
|
|
}
|
|
|
|
operator VkDisplayPlaneCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDisplayPlaneCapabilitiesKHR *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagsKHR const &,
|
|
VULKAN_HPP_NAMESPACE::Offset2D const &,
|
|
VULKAN_HPP_NAMESPACE::Offset2D const &,
|
|
VULKAN_HPP_NAMESPACE::Extent2D const &,
|
|
VULKAN_HPP_NAMESPACE::Extent2D const &,
|
|
VULKAN_HPP_NAMESPACE::Offset2D const &,
|
|
VULKAN_HPP_NAMESPACE::Offset2D const &,
|
|
VULKAN_HPP_NAMESPACE::Extent2D const &,
|
|
VULKAN_HPP_NAMESPACE::Extent2D const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( supportedAlpha, minSrcPosition, maxSrcPosition, minSrcExtent, maxSrcExtent, minDstPosition, maxDstPosition, minDstExtent, maxDstExtent );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( DisplayPlaneCapabilitiesKHR const & ) const = default;
|
|
#else
|
|
bool operator==( DisplayPlaneCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( supportedAlpha == rhs.supportedAlpha ) && ( minSrcPosition == rhs.minSrcPosition ) && ( maxSrcPosition == rhs.maxSrcPosition ) &&
|
|
( minSrcExtent == rhs.minSrcExtent ) && ( maxSrcExtent == rhs.maxSrcExtent ) && ( minDstPosition == rhs.minDstPosition ) &&
|
|
( maxDstPosition == rhs.maxDstPosition ) && ( minDstExtent == rhs.minDstExtent ) && ( maxDstExtent == rhs.maxDstExtent );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( DisplayPlaneCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagsKHR supportedAlpha = {};
|
|
VULKAN_HPP_NAMESPACE::Offset2D minSrcPosition = {};
|
|
VULKAN_HPP_NAMESPACE::Offset2D maxSrcPosition = {};
|
|
VULKAN_HPP_NAMESPACE::Extent2D minSrcExtent = {};
|
|
VULKAN_HPP_NAMESPACE::Extent2D maxSrcExtent = {};
|
|
VULKAN_HPP_NAMESPACE::Offset2D minDstPosition = {};
|
|
VULKAN_HPP_NAMESPACE::Offset2D maxDstPosition = {};
|
|
VULKAN_HPP_NAMESPACE::Extent2D minDstExtent = {};
|
|
VULKAN_HPP_NAMESPACE::Extent2D maxDstExtent = {};
|
|
};
|
|
|
|
struct DisplayPlaneCapabilities2KHR
|
|
{
|
|
using NativeType = VkDisplayPlaneCapabilities2KHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPlaneCapabilities2KHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DisplayPlaneCapabilities2KHR( VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR capabilities_ = {},
|
|
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, capabilities( capabilities_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR DisplayPlaneCapabilities2KHR( DisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DisplayPlaneCapabilities2KHR( VkDisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DisplayPlaneCapabilities2KHR( *reinterpret_cast<DisplayPlaneCapabilities2KHR const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
DisplayPlaneCapabilities2KHR & operator=( DisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DisplayPlaneCapabilities2KHR & operator=( VkDisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
operator VkDisplayPlaneCapabilities2KHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDisplayPlaneCapabilities2KHR *>( this );
|
|
}
|
|
|
|
operator VkDisplayPlaneCapabilities2KHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDisplayPlaneCapabilities2KHR *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, capabilities );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( DisplayPlaneCapabilities2KHR const & ) const = default;
|
|
#else
|
|
bool operator==( DisplayPlaneCapabilities2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( capabilities == rhs.capabilities );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( DisplayPlaneCapabilities2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPlaneCapabilities2KHR;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR capabilities = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eDisplayPlaneCapabilities2KHR>
|
|
{
|
|
using Type = DisplayPlaneCapabilities2KHR;
|
|
};
|
|
|
|
struct DisplayPlaneInfo2KHR
|
|
{
|
|
using NativeType = VkDisplayPlaneInfo2KHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPlaneInfo2KHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR
|
|
DisplayPlaneInfo2KHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode_ = {}, uint32_t planeIndex_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, mode( mode_ )
|
|
, planeIndex( planeIndex_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR DisplayPlaneInfo2KHR( DisplayPlaneInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DisplayPlaneInfo2KHR( VkDisplayPlaneInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DisplayPlaneInfo2KHR( *reinterpret_cast<DisplayPlaneInfo2KHR const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
DisplayPlaneInfo2KHR & operator=( DisplayPlaneInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DisplayPlaneInfo2KHR & operator=( VkDisplayPlaneInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 DisplayPlaneInfo2KHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DisplayPlaneInfo2KHR & setMode( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
mode = mode_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DisplayPlaneInfo2KHR & setPlaneIndex( uint32_t planeIndex_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
planeIndex = planeIndex_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkDisplayPlaneInfo2KHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDisplayPlaneInfo2KHR *>( this );
|
|
}
|
|
|
|
operator VkDisplayPlaneInfo2KHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDisplayPlaneInfo2KHR *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DisplayModeKHR const &, uint32_t const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, mode, planeIndex );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( DisplayPlaneInfo2KHR const & ) const = default;
|
|
#else
|
|
bool operator==( DisplayPlaneInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( mode == rhs.mode ) && ( planeIndex == rhs.planeIndex );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( DisplayPlaneInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPlaneInfo2KHR;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::DisplayModeKHR mode = {};
|
|
uint32_t planeIndex = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eDisplayPlaneInfo2KHR>
|
|
{
|
|
using Type = DisplayPlaneInfo2KHR;
|
|
};
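
  // Usage sketch (not part of the generated API; it assumes the usual vulkan.hpp entry point
  // PhysicalDevice::getDisplayPlaneCapabilities2KHR and exception-based error handling):
  // the setters above return *this, so a DisplayPlaneInfo2KHR can be filled in one chained
  // expression before querying the plane's capabilities.
  //
  //   vk::DisplayPlaneInfo2KHR planeInfo = vk::DisplayPlaneInfo2KHR{}.setMode( displayMode ).setPlaneIndex( planeIndex );
  //   vk::DisplayPlaneCapabilities2KHR capabilities = physicalDevice.getDisplayPlaneCapabilities2KHR( planeInfo );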
|
|
|
|
struct DisplayPlanePropertiesKHR
|
|
{
|
|
using NativeType = VkDisplayPlanePropertiesKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DisplayPlanePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR currentDisplay_ = {},
|
|
uint32_t currentStackIndex_ = {} ) VULKAN_HPP_NOEXCEPT
|
|
: currentDisplay( currentDisplay_ )
|
|
, currentStackIndex( currentStackIndex_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR DisplayPlanePropertiesKHR( DisplayPlanePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DisplayPlanePropertiesKHR( VkDisplayPlanePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DisplayPlanePropertiesKHR( *reinterpret_cast<DisplayPlanePropertiesKHR const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
DisplayPlanePropertiesKHR & operator=( DisplayPlanePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DisplayPlanePropertiesKHR & operator=( VkDisplayPlanePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
operator VkDisplayPlanePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDisplayPlanePropertiesKHR *>( this );
|
|
}
|
|
|
|
operator VkDisplayPlanePropertiesKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDisplayPlanePropertiesKHR *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::DisplayKHR const &, uint32_t const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( currentDisplay, currentStackIndex );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( DisplayPlanePropertiesKHR const & ) const = default;
|
|
#else
|
|
bool operator==( DisplayPlanePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( currentDisplay == rhs.currentDisplay ) && ( currentStackIndex == rhs.currentStackIndex );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( DisplayPlanePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::DisplayKHR currentDisplay = {};
|
|
uint32_t currentStackIndex = {};
|
|
};
|
|
|
|
struct DisplayPlaneProperties2KHR
|
|
{
|
|
using NativeType = VkDisplayPlaneProperties2KHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPlaneProperties2KHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DisplayPlaneProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR displayPlaneProperties_ = {},
|
|
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, displayPlaneProperties( displayPlaneProperties_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR DisplayPlaneProperties2KHR( DisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DisplayPlaneProperties2KHR( VkDisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DisplayPlaneProperties2KHR( *reinterpret_cast<DisplayPlaneProperties2KHR const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
DisplayPlaneProperties2KHR & operator=( DisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DisplayPlaneProperties2KHR & operator=( VkDisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
operator VkDisplayPlaneProperties2KHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDisplayPlaneProperties2KHR *>( this );
|
|
}
|
|
|
|
operator VkDisplayPlaneProperties2KHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDisplayPlaneProperties2KHR *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, displayPlaneProperties );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( DisplayPlaneProperties2KHR const & ) const = default;
|
|
#else
|
|
bool operator==( DisplayPlaneProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( displayPlaneProperties == rhs.displayPlaneProperties );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( DisplayPlaneProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPlaneProperties2KHR;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR displayPlaneProperties = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eDisplayPlaneProperties2KHR>
|
|
{
|
|
using Type = DisplayPlaneProperties2KHR;
|
|
};
|
|
|
|
struct DisplayPowerInfoEXT
|
|
{
|
|
using NativeType = VkDisplayPowerInfoEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPowerInfoEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DisplayPowerInfoEXT( VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT powerState_ = VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT::eOff,
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, powerState( powerState_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR DisplayPowerInfoEXT( DisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DisplayPowerInfoEXT( VkDisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : DisplayPowerInfoEXT( *reinterpret_cast<DisplayPowerInfoEXT const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
DisplayPowerInfoEXT & operator=( DisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DisplayPowerInfoEXT & operator=( VkDisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 DisplayPowerInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DisplayPowerInfoEXT & setPowerState( VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT powerState_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
powerState = powerState_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkDisplayPowerInfoEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDisplayPowerInfoEXT *>( this );
|
|
}
|
|
|
|
operator VkDisplayPowerInfoEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDisplayPowerInfoEXT *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, powerState );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( DisplayPowerInfoEXT const & ) const = default;
|
|
#else
|
|
bool operator==( DisplayPowerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( powerState == rhs.powerState );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( DisplayPowerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPowerInfoEXT;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT powerState = VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT::eOff;
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eDisplayPowerInfoEXT>
|
|
{
|
|
using Type = DisplayPowerInfoEXT;
|
|
};
|
|
|
|
struct DisplayPresentInfoKHR
|
|
{
|
|
using NativeType = VkDisplayPresentInfoKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPresentInfoKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DisplayPresentInfoKHR( VULKAN_HPP_NAMESPACE::Rect2D srcRect_ = {},
|
|
VULKAN_HPP_NAMESPACE::Rect2D dstRect_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 persistent_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, srcRect( srcRect_ )
|
|
, dstRect( dstRect_ )
|
|
, persistent( persistent_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR DisplayPresentInfoKHR( DisplayPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DisplayPresentInfoKHR( VkDisplayPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DisplayPresentInfoKHR( *reinterpret_cast<DisplayPresentInfoKHR const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
DisplayPresentInfoKHR & operator=( DisplayPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DisplayPresentInfoKHR & operator=( VkDisplayPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPresentInfoKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 DisplayPresentInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DisplayPresentInfoKHR & setSrcRect( VULKAN_HPP_NAMESPACE::Rect2D const & srcRect_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
srcRect = srcRect_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DisplayPresentInfoKHR & setDstRect( VULKAN_HPP_NAMESPACE::Rect2D const & dstRect_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dstRect = dstRect_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DisplayPresentInfoKHR & setPersistent( VULKAN_HPP_NAMESPACE::Bool32 persistent_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
persistent = persistent_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkDisplayPresentInfoKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDisplayPresentInfoKHR *>( this );
|
|
}
|
|
|
|
operator VkDisplayPresentInfoKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDisplayPresentInfoKHR *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
const void * const &,
|
|
VULKAN_HPP_NAMESPACE::Rect2D const &,
|
|
VULKAN_HPP_NAMESPACE::Rect2D const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, srcRect, dstRect, persistent );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( DisplayPresentInfoKHR const & ) const = default;
|
|
#else
|
|
bool operator==( DisplayPresentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcRect == rhs.srcRect ) && ( dstRect == rhs.dstRect ) && ( persistent == rhs.persistent );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( DisplayPresentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPresentInfoKHR;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Rect2D srcRect = {};
|
|
VULKAN_HPP_NAMESPACE::Rect2D dstRect = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 persistent = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eDisplayPresentInfoKHR>
|
|
{
|
|
using Type = DisplayPresentInfoKHR;
|
|
};
|
|
|
|
struct DisplayPropertiesKHR
|
|
{
|
|
using NativeType = VkDisplayPropertiesKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DisplayPropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display_ = {},
|
|
const char * displayName_ = {},
|
|
VULKAN_HPP_NAMESPACE::Extent2D physicalDimensions_ = {},
|
|
VULKAN_HPP_NAMESPACE::Extent2D physicalResolution_ = {},
|
|
VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 planeReorderPossible_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 persistentContent_ = {} ) VULKAN_HPP_NOEXCEPT
|
|
: display( display_ )
|
|
, displayName( displayName_ )
|
|
, physicalDimensions( physicalDimensions_ )
|
|
, physicalResolution( physicalResolution_ )
|
|
, supportedTransforms( supportedTransforms_ )
|
|
, planeReorderPossible( planeReorderPossible_ )
|
|
, persistentContent( persistentContent_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR DisplayPropertiesKHR( DisplayPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DisplayPropertiesKHR( VkDisplayPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DisplayPropertiesKHR( *reinterpret_cast<DisplayPropertiesKHR const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
DisplayPropertiesKHR & operator=( DisplayPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DisplayPropertiesKHR & operator=( VkDisplayPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
operator VkDisplayPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDisplayPropertiesKHR *>( this );
|
|
}
|
|
|
|
operator VkDisplayPropertiesKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDisplayPropertiesKHR *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::DisplayKHR const &,
|
|
const char * const &,
|
|
VULKAN_HPP_NAMESPACE::Extent2D const &,
|
|
VULKAN_HPP_NAMESPACE::Extent2D const &,
|
|
VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( display, displayName, physicalDimensions, physicalResolution, supportedTransforms, planeReorderPossible, persistentContent );
|
|
}
|
|
#endif
|
|
|
|
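    // Note: displayName is a C string, so a defaulted three-way comparison would only compare
    // pointer values; the hand-written operator<=> and operator== below therefore fall back to
    // strcmp whenever the two pointers differ.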
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
std::strong_ordering operator<=>( DisplayPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
if ( auto cmp = display <=> rhs.display; cmp != 0 )
|
|
return cmp;
|
|
if ( displayName != rhs.displayName )
|
|
if ( auto cmp = strcmp( displayName, rhs.displayName ); cmp != 0 )
|
|
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
|
|
if ( auto cmp = physicalDimensions <=> rhs.physicalDimensions; cmp != 0 )
|
|
return cmp;
|
|
if ( auto cmp = physicalResolution <=> rhs.physicalResolution; cmp != 0 )
|
|
return cmp;
|
|
if ( auto cmp = supportedTransforms <=> rhs.supportedTransforms; cmp != 0 )
|
|
return cmp;
|
|
if ( auto cmp = planeReorderPossible <=> rhs.planeReorderPossible; cmp != 0 )
|
|
return cmp;
|
|
if ( auto cmp = persistentContent <=> rhs.persistentContent; cmp != 0 )
|
|
return cmp;
|
|
|
|
return std::strong_ordering::equivalent;
|
|
}
|
|
#endif
|
|
|
|
bool operator==( DisplayPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return ( display == rhs.display ) && ( ( displayName == rhs.displayName ) || ( strcmp( displayName, rhs.displayName ) == 0 ) ) &&
|
|
( physicalDimensions == rhs.physicalDimensions ) && ( physicalResolution == rhs.physicalResolution ) &&
|
|
( supportedTransforms == rhs.supportedTransforms ) && ( planeReorderPossible == rhs.planeReorderPossible ) &&
|
|
( persistentContent == rhs.persistentContent );
|
|
}
|
|
|
|
bool operator!=( DisplayPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::DisplayKHR display = {};
|
|
const char * displayName = {};
|
|
VULKAN_HPP_NAMESPACE::Extent2D physicalDimensions = {};
|
|
VULKAN_HPP_NAMESPACE::Extent2D physicalResolution = {};
|
|
VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 planeReorderPossible = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 persistentContent = {};
|
|
};
|
|
|
|
struct DisplayProperties2KHR
|
|
{
|
|
using NativeType = VkDisplayProperties2KHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayProperties2KHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DisplayProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR displayProperties_ = {},
|
|
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, displayProperties( displayProperties_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR DisplayProperties2KHR( DisplayProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DisplayProperties2KHR( VkDisplayProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DisplayProperties2KHR( *reinterpret_cast<DisplayProperties2KHR const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
DisplayProperties2KHR & operator=( DisplayProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DisplayProperties2KHR & operator=( VkDisplayProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
operator VkDisplayProperties2KHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDisplayProperties2KHR *>( this );
|
|
}
|
|
|
|
operator VkDisplayProperties2KHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDisplayProperties2KHR *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, displayProperties );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( DisplayProperties2KHR const & ) const = default;
|
|
#else
|
|
bool operator==( DisplayProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( displayProperties == rhs.displayProperties );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( DisplayProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayProperties2KHR;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR displayProperties = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eDisplayProperties2KHR>
|
|
{
|
|
using Type = DisplayProperties2KHR;
|
|
};
|
|
|
|
struct DisplaySurfaceCreateInfoKHR
|
|
{
|
|
using NativeType = VkDisplaySurfaceCreateInfoKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplaySurfaceCreateInfoKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR
|
|
DisplaySurfaceCreateInfoKHR( VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateFlagsKHR flags_ = {},
|
|
VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode_ = {},
|
|
uint32_t planeIndex_ = {},
|
|
uint32_t planeStackIndex_ = {},
|
|
VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity,
|
|
float globalAlpha_ = {},
|
|
VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR alphaMode_ = VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR::eOpaque,
|
|
VULKAN_HPP_NAMESPACE::Extent2D imageExtent_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, flags( flags_ )
|
|
, displayMode( displayMode_ )
|
|
, planeIndex( planeIndex_ )
|
|
, planeStackIndex( planeStackIndex_ )
|
|
, transform( transform_ )
|
|
, globalAlpha( globalAlpha_ )
|
|
, alphaMode( alphaMode_ )
|
|
, imageExtent( imageExtent_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR DisplaySurfaceCreateInfoKHR( DisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DisplaySurfaceCreateInfoKHR( VkDisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DisplaySurfaceCreateInfoKHR( *reinterpret_cast<DisplaySurfaceCreateInfoKHR const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
DisplaySurfaceCreateInfoKHR & operator=( DisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DisplaySurfaceCreateInfoKHR & operator=( VkDisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setDisplayMode( VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
displayMode = displayMode_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setPlaneIndex( uint32_t planeIndex_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
planeIndex = planeIndex_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setPlaneStackIndex( uint32_t planeStackIndex_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
planeStackIndex = planeStackIndex_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
transform = transform_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setGlobalAlpha( float globalAlpha_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
globalAlpha = globalAlpha_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setAlphaMode( VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR alphaMode_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
alphaMode = alphaMode_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setImageExtent( VULKAN_HPP_NAMESPACE::Extent2D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
imageExtent = imageExtent_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkDisplaySurfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR *>( this );
|
|
}
|
|
|
|
operator VkDisplaySurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDisplaySurfaceCreateInfoKHR *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
const void * const &,
|
|
VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateFlagsKHR const &,
|
|
VULKAN_HPP_NAMESPACE::DisplayModeKHR const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR const &,
|
|
float const &,
|
|
VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR const &,
|
|
VULKAN_HPP_NAMESPACE::Extent2D const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags, displayMode, planeIndex, planeStackIndex, transform, globalAlpha, alphaMode, imageExtent );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( DisplaySurfaceCreateInfoKHR const & ) const = default;
|
|
#else
|
|
bool operator==( DisplaySurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( displayMode == rhs.displayMode ) &&
|
|
( planeIndex == rhs.planeIndex ) && ( planeStackIndex == rhs.planeStackIndex ) && ( transform == rhs.transform ) &&
|
|
( globalAlpha == rhs.globalAlpha ) && ( alphaMode == rhs.alphaMode ) && ( imageExtent == rhs.imageExtent );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( DisplaySurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplaySurfaceCreateInfoKHR;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateFlagsKHR flags = {};
|
|
VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode = {};
|
|
uint32_t planeIndex = {};
|
|
uint32_t planeStackIndex = {};
|
|
VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity;
|
|
float globalAlpha = {};
|
|
VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR alphaMode = VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR::eOpaque;
|
|
VULKAN_HPP_NAMESPACE::Extent2D imageExtent = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eDisplaySurfaceCreateInfoKHR>
|
|
{
|
|
using Type = DisplaySurfaceCreateInfoKHR;
|
|
};
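
  // Usage sketch (assumes the usual vulkan.hpp wrapper Instance::createDisplayPlaneSurfaceKHR and
  // exception-based error handling; adapt to your configuration): chain the setters to describe
  // the display mode / plane pair, then hand the struct to the instance to create a surface.
  //
  //   vk::DisplaySurfaceCreateInfoKHR surfaceCreateInfo = vk::DisplaySurfaceCreateInfoKHR{}
  //                                                         .setDisplayMode( displayMode )
  //                                                         .setPlaneIndex( planeIndex )
  //                                                         .setImageExtent( extent );
  //   vk::SurfaceKHR surface = instance.createDisplayPlaneSurfaceKHR( surfaceCreateInfo );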
|
|
|
|
struct DrawIndexedIndirectCommand
|
|
{
|
|
using NativeType = VkDrawIndexedIndirectCommand;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DrawIndexedIndirectCommand( uint32_t indexCount_ = {},
|
|
uint32_t instanceCount_ = {},
|
|
uint32_t firstIndex_ = {},
|
|
int32_t vertexOffset_ = {},
|
|
uint32_t firstInstance_ = {} ) VULKAN_HPP_NOEXCEPT
|
|
: indexCount( indexCount_ )
|
|
, instanceCount( instanceCount_ )
|
|
, firstIndex( firstIndex_ )
|
|
, vertexOffset( vertexOffset_ )
|
|
, firstInstance( firstInstance_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR DrawIndexedIndirectCommand( DrawIndexedIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DrawIndexedIndirectCommand( VkDrawIndexedIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DrawIndexedIndirectCommand( *reinterpret_cast<DrawIndexedIndirectCommand const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
DrawIndexedIndirectCommand & operator=( DrawIndexedIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DrawIndexedIndirectCommand & operator=( VkDrawIndexedIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DrawIndexedIndirectCommand const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 DrawIndexedIndirectCommand & setIndexCount( uint32_t indexCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
indexCount = indexCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DrawIndexedIndirectCommand & setInstanceCount( uint32_t instanceCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
instanceCount = instanceCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DrawIndexedIndirectCommand & setFirstIndex( uint32_t firstIndex_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
firstIndex = firstIndex_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DrawIndexedIndirectCommand & setVertexOffset( int32_t vertexOffset_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
vertexOffset = vertexOffset_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DrawIndexedIndirectCommand & setFirstInstance( uint32_t firstInstance_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
firstInstance = firstInstance_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkDrawIndexedIndirectCommand const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDrawIndexedIndirectCommand *>( this );
|
|
}
|
|
|
|
operator VkDrawIndexedIndirectCommand &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDrawIndexedIndirectCommand *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<uint32_t const &, uint32_t const &, uint32_t const &, int32_t const &, uint32_t const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( indexCount, instanceCount, firstIndex, vertexOffset, firstInstance );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( DrawIndexedIndirectCommand const & ) const = default;
|
|
#else
|
|
bool operator==( DrawIndexedIndirectCommand const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( indexCount == rhs.indexCount ) && ( instanceCount == rhs.instanceCount ) && ( firstIndex == rhs.firstIndex ) &&
|
|
( vertexOffset == rhs.vertexOffset ) && ( firstInstance == rhs.firstInstance );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( DrawIndexedIndirectCommand const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
uint32_t indexCount = {};
|
|
uint32_t instanceCount = {};
|
|
uint32_t firstIndex = {};
|
|
int32_t vertexOffset = {};
|
|
uint32_t firstInstance = {};
|
|
};
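
  // Note (informational, not generated): this struct has the same layout as
  // VkDrawIndexedIndirectCommand, so arrays of it can be written directly into the indirect
  // buffer that vkCmdDrawIndexedIndirect / CommandBuffer::drawIndexedIndirect reads from.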
|
|
|
|
struct DrawIndirectCommand
|
|
{
|
|
using NativeType = VkDrawIndirectCommand;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DrawIndirectCommand( uint32_t vertexCount_ = {},
|
|
uint32_t instanceCount_ = {},
|
|
uint32_t firstVertex_ = {},
|
|
uint32_t firstInstance_ = {} ) VULKAN_HPP_NOEXCEPT
|
|
: vertexCount( vertexCount_ )
|
|
, instanceCount( instanceCount_ )
|
|
, firstVertex( firstVertex_ )
|
|
, firstInstance( firstInstance_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR DrawIndirectCommand( DrawIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DrawIndirectCommand( VkDrawIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT : DrawIndirectCommand( *reinterpret_cast<DrawIndirectCommand const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
DrawIndirectCommand & operator=( DrawIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DrawIndirectCommand & operator=( VkDrawIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DrawIndirectCommand const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 DrawIndirectCommand & setVertexCount( uint32_t vertexCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
vertexCount = vertexCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DrawIndirectCommand & setInstanceCount( uint32_t instanceCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
instanceCount = instanceCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DrawIndirectCommand & setFirstVertex( uint32_t firstVertex_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
firstVertex = firstVertex_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DrawIndirectCommand & setFirstInstance( uint32_t firstInstance_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
firstInstance = firstInstance_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkDrawIndirectCommand const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDrawIndirectCommand *>( this );
|
|
}
|
|
|
|
operator VkDrawIndirectCommand &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDrawIndirectCommand *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( vertexCount, instanceCount, firstVertex, firstInstance );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( DrawIndirectCommand const & ) const = default;
|
|
#else
|
|
bool operator==( DrawIndirectCommand const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( vertexCount == rhs.vertexCount ) && ( instanceCount == rhs.instanceCount ) && ( firstVertex == rhs.firstVertex ) &&
|
|
( firstInstance == rhs.firstInstance );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( DrawIndirectCommand const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
uint32_t vertexCount = {};
|
|
uint32_t instanceCount = {};
|
|
uint32_t firstVertex = {};
|
|
uint32_t firstInstance = {};
|
|
};
|
|
|
|
struct DrmFormatModifierProperties2EXT
|
|
{
|
|
using NativeType = VkDrmFormatModifierProperties2EXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DrmFormatModifierProperties2EXT( uint64_t drmFormatModifier_ = {},
|
|
uint32_t drmFormatModifierPlaneCount_ = {},
|
|
VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 drmFormatModifierTilingFeatures_ = {} ) VULKAN_HPP_NOEXCEPT
|
|
: drmFormatModifier( drmFormatModifier_ )
|
|
, drmFormatModifierPlaneCount( drmFormatModifierPlaneCount_ )
|
|
, drmFormatModifierTilingFeatures( drmFormatModifierTilingFeatures_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR DrmFormatModifierProperties2EXT( DrmFormatModifierProperties2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DrmFormatModifierProperties2EXT( VkDrmFormatModifierProperties2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DrmFormatModifierProperties2EXT( *reinterpret_cast<DrmFormatModifierProperties2EXT const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
DrmFormatModifierProperties2EXT & operator=( DrmFormatModifierProperties2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DrmFormatModifierProperties2EXT & operator=( VkDrmFormatModifierProperties2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DrmFormatModifierProperties2EXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
operator VkDrmFormatModifierProperties2EXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDrmFormatModifierProperties2EXT *>( this );
|
|
}
|
|
|
|
operator VkDrmFormatModifierProperties2EXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDrmFormatModifierProperties2EXT *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<uint64_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( drmFormatModifier, drmFormatModifierPlaneCount, drmFormatModifierTilingFeatures );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( DrmFormatModifierProperties2EXT const & ) const = default;
|
|
#else
|
|
bool operator==( DrmFormatModifierProperties2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( drmFormatModifier == rhs.drmFormatModifier ) && ( drmFormatModifierPlaneCount == rhs.drmFormatModifierPlaneCount ) &&
|
|
( drmFormatModifierTilingFeatures == rhs.drmFormatModifierTilingFeatures );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( DrmFormatModifierProperties2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
uint64_t drmFormatModifier = {};
|
|
uint32_t drmFormatModifierPlaneCount = {};
|
|
VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 drmFormatModifierTilingFeatures = {};
|
|
};
|
|
|
|
struct DrmFormatModifierPropertiesEXT
|
|
{
|
|
using NativeType = VkDrmFormatModifierPropertiesEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesEXT( uint64_t drmFormatModifier_ = {},
|
|
uint32_t drmFormatModifierPlaneCount_ = {},
|
|
VULKAN_HPP_NAMESPACE::FormatFeatureFlags drmFormatModifierTilingFeatures_ = {} ) VULKAN_HPP_NOEXCEPT
|
|
: drmFormatModifier( drmFormatModifier_ )
|
|
, drmFormatModifierPlaneCount( drmFormatModifierPlaneCount_ )
|
|
, drmFormatModifierTilingFeatures( drmFormatModifierTilingFeatures_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesEXT( DrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DrmFormatModifierPropertiesEXT( VkDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DrmFormatModifierPropertiesEXT( *reinterpret_cast<DrmFormatModifierPropertiesEXT const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
DrmFormatModifierPropertiesEXT & operator=( DrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DrmFormatModifierPropertiesEXT & operator=( VkDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
operator VkDrmFormatModifierPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDrmFormatModifierPropertiesEXT *>( this );
|
|
}
|
|
|
|
operator VkDrmFormatModifierPropertiesEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDrmFormatModifierPropertiesEXT *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<uint64_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::FormatFeatureFlags const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( drmFormatModifier, drmFormatModifierPlaneCount, drmFormatModifierTilingFeatures );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( DrmFormatModifierPropertiesEXT const & ) const = default;
|
|
#else
|
|
bool operator==( DrmFormatModifierPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( drmFormatModifier == rhs.drmFormatModifier ) && ( drmFormatModifierPlaneCount == rhs.drmFormatModifierPlaneCount ) &&
|
|
( drmFormatModifierTilingFeatures == rhs.drmFormatModifierTilingFeatures );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( DrmFormatModifierPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
uint64_t drmFormatModifier = {};
|
|
uint32_t drmFormatModifierPlaneCount = {};
|
|
VULKAN_HPP_NAMESPACE::FormatFeatureFlags drmFormatModifierTilingFeatures = {};
|
|
};
|
|
|
|
  struct DrmFormatModifierPropertiesList2EXT
  {
    using NativeType = VkDrmFormatModifierPropertiesList2EXT;

    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDrmFormatModifierPropertiesList2EXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesList2EXT( uint32_t                                                drmFormatModifierCount_       = {},
                                                              VULKAN_HPP_NAMESPACE::DrmFormatModifierProperties2EXT * pDrmFormatModifierProperties_ = {},
                                                              void *                                                  pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , drmFormatModifierCount( drmFormatModifierCount_ )
      , pDrmFormatModifierProperties( pDrmFormatModifierProperties_ )
    {
    }

    VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesList2EXT( DrmFormatModifierPropertiesList2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DrmFormatModifierPropertiesList2EXT( VkDrmFormatModifierPropertiesList2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : DrmFormatModifierPropertiesList2EXT( *reinterpret_cast<DrmFormatModifierPropertiesList2EXT const *>( &rhs ) )
    {
    }

# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    DrmFormatModifierPropertiesList2EXT(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<VULKAN_HPP_NAMESPACE::DrmFormatModifierProperties2EXT> const & drmFormatModifierProperties_,
      void *                                                                                                       pNext_ = nullptr )
      : pNext( pNext_ )
      , drmFormatModifierCount( static_cast<uint32_t>( drmFormatModifierProperties_.size() ) )
      , pDrmFormatModifierProperties( drmFormatModifierProperties_.data() )
    {
    }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    DrmFormatModifierPropertiesList2EXT & operator=( DrmFormatModifierPropertiesList2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    DrmFormatModifierPropertiesList2EXT & operator=( VkDrmFormatModifierPropertiesList2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesList2EXT const *>( &rhs );
      return *this;
    }

    operator VkDrmFormatModifierPropertiesList2EXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDrmFormatModifierPropertiesList2EXT *>( this );
    }

    operator VkDrmFormatModifierPropertiesList2EXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDrmFormatModifierPropertiesList2EXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, VULKAN_HPP_NAMESPACE::DrmFormatModifierProperties2EXT * const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, drmFormatModifierCount, pDrmFormatModifierProperties );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DrmFormatModifierPropertiesList2EXT const & ) const = default;
#else
    bool operator==( DrmFormatModifierPropertiesList2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
# if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
# else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( drmFormatModifierCount == rhs.drmFormatModifierCount ) &&
             ( pDrmFormatModifierProperties == rhs.pDrmFormatModifierProperties );
# endif
    }

    bool operator!=( DrmFormatModifierPropertiesList2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                     sType                        = StructureType::eDrmFormatModifierPropertiesList2EXT;
    void *                                                  pNext                        = {};
    uint32_t                                                drmFormatModifierCount       = {};
    VULKAN_HPP_NAMESPACE::DrmFormatModifierProperties2EXT * pDrmFormatModifierProperties = {};
  };

  template <>
  struct CppType<StructureType, StructureType::eDrmFormatModifierPropertiesList2EXT>
  {
    using Type = DrmFormatModifierPropertiesList2EXT;
  };

  struct DrmFormatModifierPropertiesListEXT
  {
    using NativeType = VkDrmFormatModifierPropertiesListEXT;

    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDrmFormatModifierPropertiesListEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesListEXT( uint32_t                                               drmFormatModifierCount_       = {},
                                                             VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT * pDrmFormatModifierProperties_ = {},
                                                             void *                                                 pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , drmFormatModifierCount( drmFormatModifierCount_ )
      , pDrmFormatModifierProperties( pDrmFormatModifierProperties_ )
    {
    }

    VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesListEXT( DrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DrmFormatModifierPropertiesListEXT( VkDrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : DrmFormatModifierPropertiesListEXT( *reinterpret_cast<DrmFormatModifierPropertiesListEXT const *>( &rhs ) )
    {
    }

# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    DrmFormatModifierPropertiesListEXT(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT> const & drmFormatModifierProperties_,
      void *                                                                                                      pNext_ = nullptr )
      : pNext( pNext_ )
      , drmFormatModifierCount( static_cast<uint32_t>( drmFormatModifierProperties_.size() ) )
      , pDrmFormatModifierProperties( drmFormatModifierProperties_.data() )
    {
    }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    DrmFormatModifierPropertiesListEXT & operator=( DrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    DrmFormatModifierPropertiesListEXT & operator=( VkDrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesListEXT const *>( &rhs );
      return *this;
    }

    operator VkDrmFormatModifierPropertiesListEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDrmFormatModifierPropertiesListEXT *>( this );
    }

    operator VkDrmFormatModifierPropertiesListEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDrmFormatModifierPropertiesListEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT * const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, drmFormatModifierCount, pDrmFormatModifierProperties );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DrmFormatModifierPropertiesListEXT const & ) const = default;
#else
    bool operator==( DrmFormatModifierPropertiesListEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
# if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
# else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( drmFormatModifierCount == rhs.drmFormatModifierCount ) &&
             ( pDrmFormatModifierProperties == rhs.pDrmFormatModifierProperties );
# endif
    }

    bool operator!=( DrmFormatModifierPropertiesListEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                    sType                        = StructureType::eDrmFormatModifierPropertiesListEXT;
    void *                                                 pNext                        = {};
    uint32_t                                               drmFormatModifierCount       = {};
    VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT * pDrmFormatModifierProperties = {};
  };

  template <>
  struct CppType<StructureType, StructureType::eDrmFormatModifierPropertiesListEXT>
  {
    using Type = DrmFormatModifierPropertiesListEXT;
  };
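
  // Illustrative usage sketch, not part of the generated interface: DrmFormatModifierPropertiesListEXT is
  // typically chained into FormatProperties2 and queried twice, once for the count and once for the data.
  // The physicalDevice handle and the chosen format below are assumptions of the sketch:
  //
  //   VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesListEXT modifierList;   // pDrmFormatModifierProperties == nullptr -> count query
  //   VULKAN_HPP_NAMESPACE::FormatProperties2                  formatProperties2;
  //   formatProperties2.pNext = &modifierList;
  //   physicalDevice.getFormatProperties2( VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Unorm, &formatProperties2 );
  //
  //   std::vector<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT> modifiers( modifierList.drmFormatModifierCount );
  //   modifierList.pDrmFormatModifierProperties = modifiers.data();            // second query fills the vector
  //   physicalDevice.getFormatProperties2( VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Unorm, &formatProperties2 );
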
  struct EventCreateInfo
  {
    using NativeType = VkEventCreateInfo;

    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eEventCreateInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR EventCreateInfo( VULKAN_HPP_NAMESPACE::EventCreateFlags flags_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , flags( flags_ )
    {
    }

    VULKAN_HPP_CONSTEXPR EventCreateInfo( EventCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    EventCreateInfo( VkEventCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : EventCreateInfo( *reinterpret_cast<EventCreateInfo const *>( &rhs ) ) {}

    EventCreateInfo & operator=( EventCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    EventCreateInfo & operator=( VkEventCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::EventCreateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 EventCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 EventCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::EventCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkEventCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkEventCreateInfo *>( this );
    }

    operator VkEventCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkEventCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::EventCreateFlags const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( EventCreateInfo const & ) const = default;
#else
    bool operator==( EventCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
# if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
# else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags );
# endif
    }

    bool operator!=( EventCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType    sType = StructureType::eEventCreateInfo;
    const void *                           pNext = {};
    VULKAN_HPP_NAMESPACE::EventCreateFlags flags = {};
  };

  template <>
  struct CppType<StructureType, StructureType::eEventCreateInfo>
  {
    using Type = EventCreateInfo;
  };
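
  // Minimal usage sketch, illustrative only, assuming a valid VULKAN_HPP_NAMESPACE::Device named device and
  // exception-based error handling: EventCreateInfo currently carries only flags and pNext, so a
  // default-constructed instance is usually sufficient:
  //
  //   VULKAN_HPP_NAMESPACE::EventCreateInfo eventCreateInfo{};
  //   VULKAN_HPP_NAMESPACE::Event           event = device.createEvent( eventCreateInfo );
  //   ...
  //   device.destroyEvent( event );
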
  struct ExportFenceCreateInfo
  {
    using NativeType = VkExportFenceCreateInfo;

    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportFenceCreateInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR ExportFenceCreateInfo( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags handleTypes_ = {},
                                                const void *                                       pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , handleTypes( handleTypes_ )
    {
    }

    VULKAN_HPP_CONSTEXPR ExportFenceCreateInfo( ExportFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ExportFenceCreateInfo( VkExportFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : ExportFenceCreateInfo( *reinterpret_cast<ExportFenceCreateInfo const *>( &rhs ) )
    {
    }

    ExportFenceCreateInfo & operator=( ExportFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    ExportFenceCreateInfo & operator=( VkExportFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportFenceCreateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 ExportFenceCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ExportFenceCreateInfo & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT
    {
      handleTypes = handleTypes_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkExportFenceCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkExportFenceCreateInfo *>( this );
    }

    operator VkExportFenceCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkExportFenceCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, handleTypes );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ExportFenceCreateInfo const & ) const = default;
#else
    bool operator==( ExportFenceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
# if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
# else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleTypes == rhs.handleTypes );
# endif
    }

    bool operator!=( ExportFenceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                sType       = StructureType::eExportFenceCreateInfo;
    const void *                                       pNext       = {};
    VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags handleTypes = {};
  };

  template <>
  struct CppType<StructureType, StructureType::eExportFenceCreateInfo>
  {
    using Type = ExportFenceCreateInfo;
  };

  using ExportFenceCreateInfoKHR = ExportFenceCreateInfo;

#if defined( VK_USE_PLATFORM_SCI )
|
|
struct ExportFenceSciSyncInfoNV
|
|
{
|
|
using NativeType = VkExportFenceSciSyncInfoNV;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportFenceSciSyncInfoNV;
|
|
|
|
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ExportFenceSciSyncInfoNV( NvSciSyncAttrList pAttributes_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, pAttributes( pAttributes_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR ExportFenceSciSyncInfoNV( ExportFenceSciSyncInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ExportFenceSciSyncInfoNV( VkExportFenceSciSyncInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ExportFenceSciSyncInfoNV( *reinterpret_cast<ExportFenceSciSyncInfoNV const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
ExportFenceSciSyncInfoNV & operator=( ExportFenceSciSyncInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ExportFenceSciSyncInfoNV & operator=( VkExportFenceSciSyncInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportFenceSciSyncInfoNV const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ExportFenceSciSyncInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ExportFenceSciSyncInfoNV & setPAttributes( NvSciSyncAttrList pAttributes_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pAttributes = pAttributes_;
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkExportFenceSciSyncInfoNV const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkExportFenceSciSyncInfoNV *>( this );
|
|
}
|
|
|
|
operator VkExportFenceSciSyncInfoNV &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkExportFenceSciSyncInfoNV *>( this );
|
|
}
|
|
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, NvSciSyncAttrList const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, pAttributes );
|
|
}
|
|
# endif
|
|
|
|
# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
std::strong_ordering operator<=>( ExportFenceSciSyncInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
|
|
return cmp;
|
|
if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
|
|
return cmp;
|
|
if ( auto cmp = memcmp( &pAttributes, &rhs.pAttributes, sizeof( NvSciSyncAttrList ) ); cmp != 0 )
|
|
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
|
|
|
|
return std::strong_ordering::equivalent;
|
|
}
|
|
# endif
|
|
|
|
bool operator==( ExportFenceSciSyncInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memcmp( &pAttributes, &rhs.pAttributes, sizeof( NvSciSyncAttrList ) ) == 0 );
|
|
}
|
|
|
|
bool operator!=( ExportFenceSciSyncInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportFenceSciSyncInfoNV;
|
|
const void * pNext = {};
|
|
NvSciSyncAttrList pAttributes = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eExportFenceSciSyncInfoNV>
|
|
{
|
|
using Type = ExportFenceSciSyncInfoNV;
|
|
};
|
|
#endif /*VK_USE_PLATFORM_SCI*/
|
|
|
|
struct ExportMemoryAllocateInfo
|
|
{
|
|
using NativeType = VkExportMemoryAllocateInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMemoryAllocateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfo( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, handleTypes( handleTypes_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfo( ExportMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ExportMemoryAllocateInfo( VkExportMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ExportMemoryAllocateInfo( *reinterpret_cast<ExportMemoryAllocateInfo const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
ExportMemoryAllocateInfo & operator=( ExportMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ExportMemoryAllocateInfo & operator=( VkExportMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ExportMemoryAllocateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ExportMemoryAllocateInfo & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
handleTypes = handleTypes_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkExportMemoryAllocateInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkExportMemoryAllocateInfo *>( this );
|
|
}
|
|
|
|
operator VkExportMemoryAllocateInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkExportMemoryAllocateInfo *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, handleTypes );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( ExportMemoryAllocateInfo const & ) const = default;
|
|
#else
|
|
bool operator==( ExportMemoryAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleTypes == rhs.handleTypes );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( ExportMemoryAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMemoryAllocateInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eExportMemoryAllocateInfo>
|
|
{
|
|
using Type = ExportMemoryAllocateInfo;
|
|
};
|
|
|
|
using ExportMemoryAllocateInfoKHR = ExportMemoryAllocateInfo;
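
  // Illustrative chaining sketch, not part of the generated interface: ExportMemoryAllocateInfo is hooked into
  // MemoryAllocateInfo::pNext when exportable memory is allocated. device, allocationSize and memoryTypeIndex
  // below are assumptions of the sketch:
  //
  //   VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfo exportAllocateInfo( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd );
  //   VULKAN_HPP_NAMESPACE::MemoryAllocateInfo       allocateInfo( allocationSize, memoryTypeIndex, &exportAllocateInfo );
  //   VULKAN_HPP_NAMESPACE::DeviceMemory             memory = device.allocateMemory( allocateInfo );
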
#if defined( VK_USE_PLATFORM_SCI )
|
|
struct ExportMemorySciBufInfoNV
|
|
{
|
|
using NativeType = VkExportMemorySciBufInfoNV;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMemorySciBufInfoNV;
|
|
|
|
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ExportMemorySciBufInfoNV( NvSciBufAttrList pAttributes_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, pAttributes( pAttributes_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR ExportMemorySciBufInfoNV( ExportMemorySciBufInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ExportMemorySciBufInfoNV( VkExportMemorySciBufInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ExportMemorySciBufInfoNV( *reinterpret_cast<ExportMemorySciBufInfoNV const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
ExportMemorySciBufInfoNV & operator=( ExportMemorySciBufInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ExportMemorySciBufInfoNV & operator=( VkExportMemorySciBufInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportMemorySciBufInfoNV const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ExportMemorySciBufInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ExportMemorySciBufInfoNV & setPAttributes( NvSciBufAttrList pAttributes_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pAttributes = pAttributes_;
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkExportMemorySciBufInfoNV const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkExportMemorySciBufInfoNV *>( this );
|
|
}
|
|
|
|
operator VkExportMemorySciBufInfoNV &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkExportMemorySciBufInfoNV *>( this );
|
|
}
|
|
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, NvSciBufAttrList const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, pAttributes );
|
|
}
|
|
# endif
|
|
|
|
# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
std::strong_ordering operator<=>( ExportMemorySciBufInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
|
|
return cmp;
|
|
if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
|
|
return cmp;
|
|
if ( auto cmp = memcmp( &pAttributes, &rhs.pAttributes, sizeof( NvSciBufAttrList ) ); cmp != 0 )
|
|
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
|
|
|
|
return std::strong_ordering::equivalent;
|
|
}
|
|
# endif
|
|
|
|
bool operator==( ExportMemorySciBufInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memcmp( &pAttributes, &rhs.pAttributes, sizeof( NvSciBufAttrList ) ) == 0 );
|
|
}
|
|
|
|
bool operator!=( ExportMemorySciBufInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMemorySciBufInfoNV;
|
|
const void * pNext = {};
|
|
NvSciBufAttrList pAttributes = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eExportMemorySciBufInfoNV>
|
|
{
|
|
using Type = ExportMemorySciBufInfoNV;
|
|
};
|
|
#endif /*VK_USE_PLATFORM_SCI*/
|
|
|
|
struct ExportSemaphoreCreateInfo
|
|
{
|
|
using NativeType = VkExportSemaphoreCreateInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportSemaphoreCreateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ExportSemaphoreCreateInfo( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags handleTypes_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, handleTypes( handleTypes_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR ExportSemaphoreCreateInfo( ExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ExportSemaphoreCreateInfo( VkExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ExportSemaphoreCreateInfo( *reinterpret_cast<ExportSemaphoreCreateInfo const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
ExportSemaphoreCreateInfo & operator=( ExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ExportSemaphoreCreateInfo & operator=( VkExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportSemaphoreCreateInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ExportSemaphoreCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ExportSemaphoreCreateInfo &
|
|
setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
handleTypes = handleTypes_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkExportSemaphoreCreateInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkExportSemaphoreCreateInfo *>( this );
|
|
}
|
|
|
|
operator VkExportSemaphoreCreateInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkExportSemaphoreCreateInfo *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, handleTypes );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( ExportSemaphoreCreateInfo const & ) const = default;
|
|
#else
|
|
bool operator==( ExportSemaphoreCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleTypes == rhs.handleTypes );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( ExportSemaphoreCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportSemaphoreCreateInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags handleTypes = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eExportSemaphoreCreateInfo>
|
|
{
|
|
using Type = ExportSemaphoreCreateInfo;
|
|
};
|
|
|
|
using ExportSemaphoreCreateInfoKHR = ExportSemaphoreCreateInfo;
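
  // Illustrative chaining sketch, not part of the generated interface: ExportSemaphoreCreateInfo goes into
  // SemaphoreCreateInfo::pNext when an exportable semaphore is created; the device handle is an assumption:
  //
  //   VULKAN_HPP_NAMESPACE::ExportSemaphoreCreateInfo exportSemaphoreCreateInfo( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd );
  //   VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo       semaphoreCreateInfo( {}, &exportSemaphoreCreateInfo );
  //   VULKAN_HPP_NAMESPACE::Semaphore                 semaphore = device.createSemaphore( semaphoreCreateInfo );
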
#if defined( VK_USE_PLATFORM_SCI )
|
|
struct ExportSemaphoreSciSyncInfoNV
|
|
{
|
|
using NativeType = VkExportSemaphoreSciSyncInfoNV;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportSemaphoreSciSyncInfoNV;
|
|
|
|
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ExportSemaphoreSciSyncInfoNV( NvSciSyncAttrList pAttributes_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, pAttributes( pAttributes_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR ExportSemaphoreSciSyncInfoNV( ExportSemaphoreSciSyncInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ExportSemaphoreSciSyncInfoNV( VkExportSemaphoreSciSyncInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ExportSemaphoreSciSyncInfoNV( *reinterpret_cast<ExportSemaphoreSciSyncInfoNV const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
ExportSemaphoreSciSyncInfoNV & operator=( ExportSemaphoreSciSyncInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ExportSemaphoreSciSyncInfoNV & operator=( VkExportSemaphoreSciSyncInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportSemaphoreSciSyncInfoNV const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ExportSemaphoreSciSyncInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ExportSemaphoreSciSyncInfoNV & setPAttributes( NvSciSyncAttrList pAttributes_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pAttributes = pAttributes_;
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkExportSemaphoreSciSyncInfoNV const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkExportSemaphoreSciSyncInfoNV *>( this );
|
|
}
|
|
|
|
operator VkExportSemaphoreSciSyncInfoNV &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkExportSemaphoreSciSyncInfoNV *>( this );
|
|
}
|
|
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, NvSciSyncAttrList const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, pAttributes );
|
|
}
|
|
# endif
|
|
|
|
# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
std::strong_ordering operator<=>( ExportSemaphoreSciSyncInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
|
|
return cmp;
|
|
if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
|
|
return cmp;
|
|
if ( auto cmp = memcmp( &pAttributes, &rhs.pAttributes, sizeof( NvSciSyncAttrList ) ); cmp != 0 )
|
|
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
|
|
|
|
return std::strong_ordering::equivalent;
|
|
}
|
|
# endif
|
|
|
|
bool operator==( ExportSemaphoreSciSyncInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memcmp( &pAttributes, &rhs.pAttributes, sizeof( NvSciSyncAttrList ) ) == 0 );
|
|
}
|
|
|
|
bool operator!=( ExportSemaphoreSciSyncInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportSemaphoreSciSyncInfoNV;
|
|
const void * pNext = {};
|
|
NvSciSyncAttrList pAttributes = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eExportSemaphoreSciSyncInfoNV>
|
|
{
|
|
using Type = ExportSemaphoreSciSyncInfoNV;
|
|
};
|
|
#endif /*VK_USE_PLATFORM_SCI*/
|
|
|
|
struct ExtensionProperties
|
|
{
|
|
using NativeType = VkExtensionProperties;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR_14 ExtensionProperties( std::array<char, VK_MAX_EXTENSION_NAME_SIZE> const & extensionName_ = {},
|
|
uint32_t specVersion_ = {} ) VULKAN_HPP_NOEXCEPT
|
|
: extensionName( extensionName_ )
|
|
, specVersion( specVersion_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ExtensionProperties( ExtensionProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ExtensionProperties( VkExtensionProperties const & rhs ) VULKAN_HPP_NOEXCEPT : ExtensionProperties( *reinterpret_cast<ExtensionProperties const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
ExtensionProperties( std::string const & extensionName_, uint32_t specVersion_ = {} ) : specVersion( specVersion_ )
|
|
{
|
|
VULKAN_HPP_ASSERT( extensionName_.size() < VK_MAX_EXTENSION_NAME_SIZE );
|
|
# if defined( WIN32 )
|
|
strncpy_s( extensionName, VK_MAX_EXTENSION_NAME_SIZE, extensionName_.data(), extensionName_.size() );
|
|
# else
|
|
strncpy( extensionName, extensionName_.data(), std::min<size_t>( VK_MAX_EXTENSION_NAME_SIZE, extensionName_.size() ) );
|
|
# endif
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
ExtensionProperties & operator=( ExtensionProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ExtensionProperties & operator=( VkExtensionProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExtensionProperties const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
operator VkExtensionProperties const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkExtensionProperties *>( this );
|
|
}
|
|
|
|
operator VkExtensionProperties &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkExtensionProperties *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> const &, uint32_t const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( extensionName, specVersion );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
std::strong_ordering operator<=>( ExtensionProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
if ( auto cmp = strcmp( extensionName, rhs.extensionName ); cmp != 0 )
|
|
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
|
|
if ( auto cmp = specVersion <=> rhs.specVersion; cmp != 0 )
|
|
return cmp;
|
|
|
|
return std::strong_ordering::equivalent;
|
|
}
|
|
#endif
|
|
|
|
bool operator==( ExtensionProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return ( strcmp( extensionName, rhs.extensionName ) == 0 ) && ( specVersion == rhs.specVersion );
|
|
}
|
|
|
|
bool operator!=( ExtensionProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> extensionName = {};
|
|
uint32_t specVersion = {};
|
|
};
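
  // Illustrative usage sketch, not generated code, assuming <algorithm> is available and exceptions are enabled
  // so that enumerateInstanceExtensionProperties returns its result vector directly; the comparison mirrors the
  // strcmp-based operator== above:
  //
  //   std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties> available = VULKAN_HPP_NAMESPACE::enumerateInstanceExtensionProperties();
  //   bool hasSurface = std::any_of( available.begin(),
  //                                  available.end(),
  //                                  []( VULKAN_HPP_NAMESPACE::ExtensionProperties const & ep )
  //                                  { return strcmp( ep.extensionName, VK_KHR_SURFACE_EXTENSION_NAME ) == 0; } );
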
struct ExternalMemoryProperties
|
|
{
|
|
using NativeType = VkExternalMemoryProperties;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ExternalMemoryProperties( VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlags externalMemoryFeatures_ = {},
|
|
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags exportFromImportedHandleTypes_ = {},
|
|
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags compatibleHandleTypes_ = {} ) VULKAN_HPP_NOEXCEPT
|
|
: externalMemoryFeatures( externalMemoryFeatures_ )
|
|
, exportFromImportedHandleTypes( exportFromImportedHandleTypes_ )
|
|
, compatibleHandleTypes( compatibleHandleTypes_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR ExternalMemoryProperties( ExternalMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ExternalMemoryProperties( VkExternalMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ExternalMemoryProperties( *reinterpret_cast<ExternalMemoryProperties const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
ExternalMemoryProperties & operator=( ExternalMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ExternalMemoryProperties & operator=( VkExternalMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalMemoryProperties const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
operator VkExternalMemoryProperties const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkExternalMemoryProperties *>( this );
|
|
}
|
|
|
|
operator VkExternalMemoryProperties &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkExternalMemoryProperties *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlags const &,
|
|
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags const &,
|
|
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( externalMemoryFeatures, exportFromImportedHandleTypes, compatibleHandleTypes );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( ExternalMemoryProperties const & ) const = default;
|
|
#else
|
|
bool operator==( ExternalMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( externalMemoryFeatures == rhs.externalMemoryFeatures ) && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes ) &&
|
|
( compatibleHandleTypes == rhs.compatibleHandleTypes );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( ExternalMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlags externalMemoryFeatures = {};
|
|
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags exportFromImportedHandleTypes = {};
|
|
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags compatibleHandleTypes = {};
|
|
};
|
|
|
|
using ExternalMemoryPropertiesKHR = ExternalMemoryProperties;
|
|
|
|
struct ExternalBufferProperties
|
|
{
|
|
using NativeType = VkExternalBufferProperties;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalBufferProperties;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ExternalBufferProperties( VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties_ = {},
|
|
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, externalMemoryProperties( externalMemoryProperties_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR ExternalBufferProperties( ExternalBufferProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ExternalBufferProperties( VkExternalBufferProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ExternalBufferProperties( *reinterpret_cast<ExternalBufferProperties const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
ExternalBufferProperties & operator=( ExternalBufferProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ExternalBufferProperties & operator=( VkExternalBufferProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalBufferProperties const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
operator VkExternalBufferProperties const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkExternalBufferProperties *>( this );
|
|
}
|
|
|
|
operator VkExternalBufferProperties &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkExternalBufferProperties *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ExternalMemoryProperties const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, externalMemoryProperties );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( ExternalBufferProperties const & ) const = default;
|
|
#else
|
|
bool operator==( ExternalBufferProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( externalMemoryProperties == rhs.externalMemoryProperties );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( ExternalBufferProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalBufferProperties;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eExternalBufferProperties>
|
|
{
|
|
using Type = ExternalBufferProperties;
|
|
};
|
|
|
|
using ExternalBufferPropertiesKHR = ExternalBufferProperties;
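
  // Illustrative query sketch, not generated code, assuming a valid VULKAN_HPP_NAMESPACE::PhysicalDevice named
  // physicalDevice: ExternalBufferProperties is the output of getExternalBufferProperties for a given usage and
  // handle type:
  //
  //   VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo externalBufferInfo(
  //     {}, VULKAN_HPP_NAMESPACE::BufferUsageFlagBits::eTransferSrc, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd );
  //   VULKAN_HPP_NAMESPACE::ExternalBufferProperties externalBufferProperties =
  //     physicalDevice.getExternalBufferProperties( externalBufferInfo );
  //   // externalBufferProperties.externalMemoryProperties then reports feature, export and compatibility flags.
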
struct ExternalFenceProperties
|
|
{
|
|
using NativeType = VkExternalFenceProperties;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalFenceProperties;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ExternalFenceProperties( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags exportFromImportedHandleTypes_ = {},
|
|
VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags compatibleHandleTypes_ = {},
|
|
VULKAN_HPP_NAMESPACE::ExternalFenceFeatureFlags externalFenceFeatures_ = {},
|
|
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, exportFromImportedHandleTypes( exportFromImportedHandleTypes_ )
|
|
, compatibleHandleTypes( compatibleHandleTypes_ )
|
|
, externalFenceFeatures( externalFenceFeatures_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR ExternalFenceProperties( ExternalFenceProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ExternalFenceProperties( VkExternalFenceProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ExternalFenceProperties( *reinterpret_cast<ExternalFenceProperties const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
ExternalFenceProperties & operator=( ExternalFenceProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ExternalFenceProperties & operator=( VkExternalFenceProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalFenceProperties const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
operator VkExternalFenceProperties const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkExternalFenceProperties *>( this );
|
|
}
|
|
|
|
operator VkExternalFenceProperties &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkExternalFenceProperties *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
void * const &,
|
|
VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags const &,
|
|
VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags const &,
|
|
VULKAN_HPP_NAMESPACE::ExternalFenceFeatureFlags const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, exportFromImportedHandleTypes, compatibleHandleTypes, externalFenceFeatures );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( ExternalFenceProperties const & ) const = default;
|
|
#else
|
|
bool operator==( ExternalFenceProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes ) &&
|
|
( compatibleHandleTypes == rhs.compatibleHandleTypes ) && ( externalFenceFeatures == rhs.externalFenceFeatures );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( ExternalFenceProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalFenceProperties;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags exportFromImportedHandleTypes = {};
|
|
VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags compatibleHandleTypes = {};
|
|
VULKAN_HPP_NAMESPACE::ExternalFenceFeatureFlags externalFenceFeatures = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eExternalFenceProperties>
|
|
{
|
|
using Type = ExternalFenceProperties;
|
|
};
|
|
|
|
using ExternalFencePropertiesKHR = ExternalFenceProperties;
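
  // Illustrative query sketch, not generated code, assuming a valid VULKAN_HPP_NAMESPACE::PhysicalDevice named
  // physicalDevice: ExternalFenceProperties is filled by getExternalFenceProperties for a single handle type:
  //
  //   VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo externalFenceInfo(
  //     VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd );
  //   VULKAN_HPP_NAMESPACE::ExternalFenceProperties externalFenceProperties =
  //     physicalDevice.getExternalFenceProperties( externalFenceInfo );
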
#if defined( VK_USE_PLATFORM_SCREEN_QNX )
|
|
struct ExternalFormatQNX
|
|
{
|
|
using NativeType = VkExternalFormatQNX;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalFormatQNX;
|
|
|
|
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ExternalFormatQNX( uint64_t externalFormat_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, externalFormat( externalFormat_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR ExternalFormatQNX( ExternalFormatQNX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ExternalFormatQNX( VkExternalFormatQNX const & rhs ) VULKAN_HPP_NOEXCEPT : ExternalFormatQNX( *reinterpret_cast<ExternalFormatQNX const *>( &rhs ) ) {}
|
|
|
|
ExternalFormatQNX & operator=( ExternalFormatQNX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ExternalFormatQNX & operator=( VkExternalFormatQNX const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalFormatQNX const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ExternalFormatQNX & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ExternalFormatQNX & setExternalFormat( uint64_t externalFormat_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
externalFormat = externalFormat_;
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkExternalFormatQNX const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkExternalFormatQNX *>( this );
|
|
}
|
|
|
|
operator VkExternalFormatQNX &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkExternalFormatQNX *>( this );
|
|
}
|
|
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint64_t const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, externalFormat );
|
|
}
|
|
# endif
|
|
|
|
# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( ExternalFormatQNX const & ) const = default;
|
|
# else
|
|
bool operator==( ExternalFormatQNX const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( externalFormat == rhs.externalFormat );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( ExternalFormatQNX const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
# endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalFormatQNX;
|
|
void * pNext = {};
|
|
uint64_t externalFormat = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eExternalFormatQNX>
|
|
{
|
|
using Type = ExternalFormatQNX;
|
|
};
|
|
#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
|
|
|
|
struct ExternalImageFormatProperties
|
|
{
|
|
using NativeType = VkExternalImageFormatProperties;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalImageFormatProperties;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ExternalImageFormatProperties( VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties_ = {},
|
|
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, externalMemoryProperties( externalMemoryProperties_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR ExternalImageFormatProperties( ExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ExternalImageFormatProperties( VkExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ExternalImageFormatProperties( *reinterpret_cast<ExternalImageFormatProperties const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
ExternalImageFormatProperties & operator=( ExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ExternalImageFormatProperties & operator=( VkExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalImageFormatProperties const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
operator VkExternalImageFormatProperties const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkExternalImageFormatProperties *>( this );
|
|
}
|
|
|
|
operator VkExternalImageFormatProperties &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkExternalImageFormatProperties *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ExternalMemoryProperties const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, externalMemoryProperties );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( ExternalImageFormatProperties const & ) const = default;
|
|
#else
|
|
bool operator==( ExternalImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( externalMemoryProperties == rhs.externalMemoryProperties );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( ExternalImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalImageFormatProperties;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eExternalImageFormatProperties>
|
|
{
|
|
using Type = ExternalImageFormatProperties;
|
|
};
|
|
|
|
using ExternalImageFormatPropertiesKHR = ExternalImageFormatProperties;
|
|
|
|
struct ExternalMemoryBufferCreateInfo
|
|
{
|
|
using NativeType = VkExternalMemoryBufferCreateInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalMemoryBufferCreateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ExternalMemoryBufferCreateInfo( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, handleTypes( handleTypes_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR ExternalMemoryBufferCreateInfo( ExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ExternalMemoryBufferCreateInfo( VkExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ExternalMemoryBufferCreateInfo( *reinterpret_cast<ExternalMemoryBufferCreateInfo const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
ExternalMemoryBufferCreateInfo & operator=( ExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ExternalMemoryBufferCreateInfo & operator=( VkExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalMemoryBufferCreateInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ExternalMemoryBufferCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ExternalMemoryBufferCreateInfo &
|
|
setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
handleTypes = handleTypes_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkExternalMemoryBufferCreateInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkExternalMemoryBufferCreateInfo *>( this );
|
|
}
|
|
|
|
operator VkExternalMemoryBufferCreateInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkExternalMemoryBufferCreateInfo *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, handleTypes );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( ExternalMemoryBufferCreateInfo const & ) const = default;
|
|
#else
|
|
bool operator==( ExternalMemoryBufferCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleTypes == rhs.handleTypes );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( ExternalMemoryBufferCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalMemoryBufferCreateInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eExternalMemoryBufferCreateInfo>
|
|
{
|
|
using Type = ExternalMemoryBufferCreateInfo;
|
|
};
|
|
|
|
using ExternalMemoryBufferCreateInfoKHR = ExternalMemoryBufferCreateInfo;
|
|
|
|
  struct ExternalMemoryImageCreateInfo
  {
    using NativeType = VkExternalMemoryImageCreateInfo;

    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalMemoryImageCreateInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR ExternalMemoryImageCreateInfo( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ = {},
                                                        const void *                                        pNext_      = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , handleTypes( handleTypes_ )
    {
    }

    VULKAN_HPP_CONSTEXPR ExternalMemoryImageCreateInfo( ExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ExternalMemoryImageCreateInfo( VkExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : ExternalMemoryImageCreateInfo( *reinterpret_cast<ExternalMemoryImageCreateInfo const *>( &rhs ) )
    {
    }

    ExternalMemoryImageCreateInfo & operator=( ExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    ExternalMemoryImageCreateInfo & operator=( VkExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 ExternalMemoryImageCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ExternalMemoryImageCreateInfo &
      setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT
    {
      handleTypes = handleTypes_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkExternalMemoryImageCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkExternalMemoryImageCreateInfo *>( this );
    }

    operator VkExternalMemoryImageCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkExternalMemoryImageCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, handleTypes );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ExternalMemoryImageCreateInfo const & ) const = default;
#else
    bool operator==( ExternalMemoryImageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
# if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
# else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleTypes == rhs.handleTypes );
# endif
    }

    bool operator!=( ExternalMemoryImageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                 sType       = StructureType::eExternalMemoryImageCreateInfo;
    const void *                                        pNext       = {};
    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes = {};
  };

  template <>
  struct CppType<StructureType, StructureType::eExternalMemoryImageCreateInfo>
  {
    using Type = ExternalMemoryImageCreateInfo;
  };

  using ExternalMemoryImageCreateInfoKHR = ExternalMemoryImageCreateInfo;
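
  // Illustrative usage sketch, not generated from the registry: ExternalMemoryImageCreateInfo is
  // chained into an ImageCreateInfo to request an image whose memory may be exported. Assuming a
  // valid vk::Device `device`, a previously filled vk::ImageCreateInfo `imageCreateInfo`, and the
  // enhanced (exception-throwing) API, a minimal sketch could look like:
  //
  //   vk::StructureChain<vk::ImageCreateInfo, vk::ExternalMemoryImageCreateInfo> chain(
  //     imageCreateInfo,
  //     vk::ExternalMemoryImageCreateInfo( vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) );
  //   vk::Image image = device.createImage( chain.get<vk::ImageCreateInfo>() );
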
struct ExternalSemaphoreProperties
|
|
{
|
|
using NativeType = VkExternalSemaphoreProperties;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalSemaphoreProperties;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ExternalSemaphoreProperties( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags exportFromImportedHandleTypes_ = {},
|
|
VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags compatibleHandleTypes_ = {},
|
|
VULKAN_HPP_NAMESPACE::ExternalSemaphoreFeatureFlags externalSemaphoreFeatures_ = {},
|
|
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, exportFromImportedHandleTypes( exportFromImportedHandleTypes_ )
|
|
, compatibleHandleTypes( compatibleHandleTypes_ )
|
|
, externalSemaphoreFeatures( externalSemaphoreFeatures_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR ExternalSemaphoreProperties( ExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ExternalSemaphoreProperties( VkExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ExternalSemaphoreProperties( *reinterpret_cast<ExternalSemaphoreProperties const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
ExternalSemaphoreProperties & operator=( ExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ExternalSemaphoreProperties & operator=( VkExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
operator VkExternalSemaphoreProperties const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkExternalSemaphoreProperties *>( this );
|
|
}
|
|
|
|
operator VkExternalSemaphoreProperties &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkExternalSemaphoreProperties *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
void * const &,
|
|
VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags const &,
|
|
VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags const &,
|
|
VULKAN_HPP_NAMESPACE::ExternalSemaphoreFeatureFlags const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, exportFromImportedHandleTypes, compatibleHandleTypes, externalSemaphoreFeatures );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( ExternalSemaphoreProperties const & ) const = default;
|
|
#else
|
|
bool operator==( ExternalSemaphoreProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes ) &&
|
|
( compatibleHandleTypes == rhs.compatibleHandleTypes ) && ( externalSemaphoreFeatures == rhs.externalSemaphoreFeatures );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( ExternalSemaphoreProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalSemaphoreProperties;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags exportFromImportedHandleTypes = {};
|
|
VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags compatibleHandleTypes = {};
|
|
VULKAN_HPP_NAMESPACE::ExternalSemaphoreFeatureFlags externalSemaphoreFeatures = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eExternalSemaphoreProperties>
|
|
{
|
|
using Type = ExternalSemaphoreProperties;
|
|
};
|
|
|
|
using ExternalSemaphorePropertiesKHR = ExternalSemaphoreProperties;
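
  // Illustrative usage sketch, not generated from the registry: ExternalSemaphoreProperties is an
  // output structure, typically filled by PhysicalDevice::getExternalSemaphoreProperties.
  // Assuming a valid vk::PhysicalDevice `physicalDevice`:
  //
  //   vk::PhysicalDeviceExternalSemaphoreInfo semaphoreInfo( vk::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd );
  //   vk::ExternalSemaphoreProperties properties = physicalDevice.getExternalSemaphoreProperties( semaphoreInfo );
  //   bool exportable = static_cast<bool>( properties.externalSemaphoreFeatures & vk::ExternalSemaphoreFeatureFlagBits::eExportable );
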
struct FaultData
|
|
{
|
|
using NativeType = VkFaultData;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFaultData;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR FaultData( VULKAN_HPP_NAMESPACE::FaultLevel faultLevel_ = VULKAN_HPP_NAMESPACE::FaultLevel::eUnassigned,
|
|
VULKAN_HPP_NAMESPACE::FaultType faultType_ = VULKAN_HPP_NAMESPACE::FaultType::eInvalid,
|
|
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, faultLevel( faultLevel_ )
|
|
, faultType( faultType_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR FaultData( FaultData const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
FaultData( VkFaultData const & rhs ) VULKAN_HPP_NOEXCEPT : FaultData( *reinterpret_cast<FaultData const *>( &rhs ) ) {}
|
|
|
|
FaultData & operator=( FaultData const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
FaultData & operator=( VkFaultData const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FaultData const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
operator VkFaultData const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkFaultData *>( this );
|
|
}
|
|
|
|
operator VkFaultData &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkFaultData *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::FaultLevel const &, VULKAN_HPP_NAMESPACE::FaultType const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, faultLevel, faultType );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( FaultData const & ) const = default;
|
|
#else
|
|
bool operator==( FaultData const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( faultLevel == rhs.faultLevel ) && ( faultType == rhs.faultType );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( FaultData const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFaultData;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::FaultLevel faultLevel = VULKAN_HPP_NAMESPACE::FaultLevel::eUnassigned;
|
|
VULKAN_HPP_NAMESPACE::FaultType faultType = VULKAN_HPP_NAMESPACE::FaultType::eInvalid;
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eFaultData>
|
|
{
|
|
using Type = FaultData;
|
|
};
|
|
|
|
struct FaultCallbackInfo
|
|
{
|
|
using NativeType = VkFaultCallbackInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFaultCallbackInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR FaultCallbackInfo( uint32_t faultCount_ = {},
|
|
VULKAN_HPP_NAMESPACE::FaultData * pFaults_ = {},
|
|
PFN_vkFaultCallbackFunction pfnFaultCallback_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, faultCount( faultCount_ )
|
|
, pFaults( pFaults_ )
|
|
, pfnFaultCallback( pfnFaultCallback_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR FaultCallbackInfo( FaultCallbackInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
FaultCallbackInfo( VkFaultCallbackInfo const & rhs ) VULKAN_HPP_NOEXCEPT : FaultCallbackInfo( *reinterpret_cast<FaultCallbackInfo const *>( &rhs ) ) {}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
FaultCallbackInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<VULKAN_HPP_NAMESPACE::FaultData> const & faults_,
|
|
PFN_vkFaultCallbackFunction pfnFaultCallback_ = {},
|
|
const void * pNext_ = nullptr )
|
|
: pNext( pNext_ ), faultCount( static_cast<uint32_t>( faults_.size() ) ), pFaults( faults_.data() ), pfnFaultCallback( pfnFaultCallback_ )
|
|
{
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
FaultCallbackInfo & operator=( FaultCallbackInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
FaultCallbackInfo & operator=( VkFaultCallbackInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FaultCallbackInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 FaultCallbackInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 FaultCallbackInfo & setFaultCount( uint32_t faultCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
faultCount = faultCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 FaultCallbackInfo & setPFaults( VULKAN_HPP_NAMESPACE::FaultData * pFaults_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pFaults = pFaults_;
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
FaultCallbackInfo & setFaults( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<VULKAN_HPP_NAMESPACE::FaultData> const & faults_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
faultCount = static_cast<uint32_t>( faults_.size() );
|
|
pFaults = faults_.data();
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 FaultCallbackInfo & setPfnFaultCallback( PFN_vkFaultCallbackFunction pfnFaultCallback_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pfnFaultCallback = pfnFaultCallback_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkFaultCallbackInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkFaultCallbackInfo *>( this );
|
|
}
|
|
|
|
operator VkFaultCallbackInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkFaultCallbackInfo *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
const void * const &,
|
|
uint32_t const &,
|
|
VULKAN_HPP_NAMESPACE::FaultData * const &,
|
|
PFN_vkFaultCallbackFunction const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, faultCount, pFaults, pfnFaultCallback );
|
|
}
|
|
#endif
|
|
|
|
bool operator==( FaultCallbackInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( faultCount == rhs.faultCount ) && ( pFaults == rhs.pFaults ) &&
|
|
( pfnFaultCallback == rhs.pfnFaultCallback );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( FaultCallbackInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFaultCallbackInfo;
|
|
const void * pNext = {};
|
|
uint32_t faultCount = {};
|
|
VULKAN_HPP_NAMESPACE::FaultData * pFaults = {};
|
|
PFN_vkFaultCallbackFunction pfnFaultCallback = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eFaultCallbackInfo>
|
|
{
|
|
using Type = FaultCallbackInfo;
|
|
};
|
|
|
|
  struct FenceCreateInfo
  {
    using NativeType = VkFenceCreateInfo;

    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFenceCreateInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR FenceCreateInfo( VULKAN_HPP_NAMESPACE::FenceCreateFlags flags_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , flags( flags_ )
    {
    }

    VULKAN_HPP_CONSTEXPR FenceCreateInfo( FenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    FenceCreateInfo( VkFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : FenceCreateInfo( *reinterpret_cast<FenceCreateInfo const *>( &rhs ) ) {}

    FenceCreateInfo & operator=( FenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    FenceCreateInfo & operator=( VkFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FenceCreateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 FenceCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 FenceCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::FenceCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkFenceCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkFenceCreateInfo *>( this );
    }

    operator VkFenceCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkFenceCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::FenceCreateFlags const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( FenceCreateInfo const & ) const = default;
#else
    bool operator==( FenceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
# if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
# else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags );
# endif
    }

    bool operator!=( FenceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType    sType = StructureType::eFenceCreateInfo;
    const void *                           pNext = {};
    VULKAN_HPP_NAMESPACE::FenceCreateFlags flags = {};
  };

  template <>
  struct CppType<StructureType, StructureType::eFenceCreateInfo>
  {
    using Type = FenceCreateInfo;
  };
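
  // Illustrative usage sketch, not generated from the registry: FenceCreateInfo is consumed by
  // Device::createFence. Assuming a valid vk::Device `device` and the enhanced (exception-throwing)
  // API, a fence that starts out in the signaled state could be created like this:
  //
  //   vk::FenceCreateInfo fenceCreateInfo( vk::FenceCreateFlagBits::eSignaled );
  //   vk::Fence fence = device.createFence( fenceCreateInfo );
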
struct FenceGetFdInfoKHR
|
|
{
|
|
using NativeType = VkFenceGetFdInfoKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFenceGetFdInfoKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR
|
|
FenceGetFdInfoKHR( VULKAN_HPP_NAMESPACE::Fence fence_ = {},
|
|
VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd,
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, fence( fence_ )
|
|
, handleType( handleType_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR FenceGetFdInfoKHR( FenceGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
FenceGetFdInfoKHR( VkFenceGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT : FenceGetFdInfoKHR( *reinterpret_cast<FenceGetFdInfoKHR const *>( &rhs ) ) {}
|
|
|
|
FenceGetFdInfoKHR & operator=( FenceGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
FenceGetFdInfoKHR & operator=( VkFenceGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 FenceGetFdInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 FenceGetFdInfoKHR & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
fence = fence_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 FenceGetFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
handleType = handleType_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkFenceGetFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkFenceGetFdInfoKHR *>( this );
|
|
}
|
|
|
|
operator VkFenceGetFdInfoKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkFenceGetFdInfoKHR *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
const void * const &,
|
|
VULKAN_HPP_NAMESPACE::Fence const &,
|
|
VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, fence, handleType );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( FenceGetFdInfoKHR const & ) const = default;
|
|
#else
|
|
bool operator==( FenceGetFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fence == rhs.fence ) && ( handleType == rhs.handleType );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( FenceGetFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFenceGetFdInfoKHR;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Fence fence = {};
|
|
VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd;
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eFenceGetFdInfoKHR>
|
|
{
|
|
using Type = FenceGetFdInfoKHR;
|
|
};
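
  // Illustrative usage sketch, not generated from the registry: FenceGetFdInfoKHR is consumed by
  // Device::getFenceFdKHR (VK_KHR_external_fence_fd). Assuming that extension is enabled, the
  // dispatcher has its entry points loaded, and `fence` is a valid vk::Fence created with a
  // matching exportable handle type:
  //
  //   vk::FenceGetFdInfoKHR getFdInfo( fence, vk::ExternalFenceHandleTypeFlagBits::eSyncFd );
  //   int fd = device.getFenceFdKHR( getFdInfo );
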
#if defined( VK_USE_PLATFORM_SCI )
|
|
struct FenceGetSciSyncInfoNV
|
|
{
|
|
using NativeType = VkFenceGetSciSyncInfoNV;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFenceGetSciSyncInfoNV;
|
|
|
|
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR FenceGetSciSyncInfoNV(
|
|
VULKAN_HPP_NAMESPACE::Fence fence_ = {},
|
|
VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd,
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, fence( fence_ )
|
|
, handleType( handleType_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR FenceGetSciSyncInfoNV( FenceGetSciSyncInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
FenceGetSciSyncInfoNV( VkFenceGetSciSyncInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: FenceGetSciSyncInfoNV( *reinterpret_cast<FenceGetSciSyncInfoNV const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
FenceGetSciSyncInfoNV & operator=( FenceGetSciSyncInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
FenceGetSciSyncInfoNV & operator=( VkFenceGetSciSyncInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FenceGetSciSyncInfoNV const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 FenceGetSciSyncInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 FenceGetSciSyncInfoNV & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
fence = fence_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 FenceGetSciSyncInfoNV & setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
handleType = handleType_;
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkFenceGetSciSyncInfoNV const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkFenceGetSciSyncInfoNV *>( this );
|
|
}
|
|
|
|
operator VkFenceGetSciSyncInfoNV &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkFenceGetSciSyncInfoNV *>( this );
|
|
}
|
|
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
const void * const &,
|
|
VULKAN_HPP_NAMESPACE::Fence const &,
|
|
VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, fence, handleType );
|
|
}
|
|
# endif
|
|
|
|
# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( FenceGetSciSyncInfoNV const & ) const = default;
|
|
# else
|
|
bool operator==( FenceGetSciSyncInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fence == rhs.fence ) && ( handleType == rhs.handleType );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( FenceGetSciSyncInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
# endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFenceGetSciSyncInfoNV;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Fence fence = {};
|
|
VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd;
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eFenceGetSciSyncInfoNV>
|
|
{
|
|
using Type = FenceGetSciSyncInfoNV;
|
|
};
|
|
#endif /*VK_USE_PLATFORM_SCI*/
|
|
|
|
struct FilterCubicImageViewImageFormatPropertiesEXT
|
|
{
|
|
using NativeType = VkFilterCubicImageViewImageFormatPropertiesEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFilterCubicImageViewImageFormatPropertiesEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR FilterCubicImageViewImageFormatPropertiesEXT( VULKAN_HPP_NAMESPACE::Bool32 filterCubic_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 filterCubicMinmax_ = {},
|
|
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, filterCubic( filterCubic_ )
|
|
, filterCubicMinmax( filterCubicMinmax_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR FilterCubicImageViewImageFormatPropertiesEXT( FilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
FilterCubicImageViewImageFormatPropertiesEXT( VkFilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: FilterCubicImageViewImageFormatPropertiesEXT( *reinterpret_cast<FilterCubicImageViewImageFormatPropertiesEXT const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
FilterCubicImageViewImageFormatPropertiesEXT & operator=( FilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
FilterCubicImageViewImageFormatPropertiesEXT & operator=( VkFilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FilterCubicImageViewImageFormatPropertiesEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
operator VkFilterCubicImageViewImageFormatPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkFilterCubicImageViewImageFormatPropertiesEXT *>( this );
|
|
}
|
|
|
|
operator VkFilterCubicImageViewImageFormatPropertiesEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkFilterCubicImageViewImageFormatPropertiesEXT *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, filterCubic, filterCubicMinmax );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( FilterCubicImageViewImageFormatPropertiesEXT const & ) const = default;
|
|
#else
|
|
bool operator==( FilterCubicImageViewImageFormatPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( filterCubic == rhs.filterCubic ) && ( filterCubicMinmax == rhs.filterCubicMinmax );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( FilterCubicImageViewImageFormatPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFilterCubicImageViewImageFormatPropertiesEXT;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 filterCubic = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 filterCubicMinmax = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eFilterCubicImageViewImageFormatPropertiesEXT>
|
|
{
|
|
using Type = FilterCubicImageViewImageFormatPropertiesEXT;
|
|
};
|
|
|
|
  struct FormatProperties
  {
    using NativeType = VkFormatProperties;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR FormatProperties( VULKAN_HPP_NAMESPACE::FormatFeatureFlags linearTilingFeatures_  = {},
                                           VULKAN_HPP_NAMESPACE::FormatFeatureFlags optimalTilingFeatures_ = {},
                                           VULKAN_HPP_NAMESPACE::FormatFeatureFlags bufferFeatures_        = {} ) VULKAN_HPP_NOEXCEPT
      : linearTilingFeatures( linearTilingFeatures_ )
      , optimalTilingFeatures( optimalTilingFeatures_ )
      , bufferFeatures( bufferFeatures_ )
    {
    }

    VULKAN_HPP_CONSTEXPR FormatProperties( FormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    FormatProperties( VkFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT : FormatProperties( *reinterpret_cast<FormatProperties const *>( &rhs ) ) {}

    FormatProperties & operator=( FormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    FormatProperties & operator=( VkFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FormatProperties const *>( &rhs );
      return *this;
    }

    operator VkFormatProperties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkFormatProperties *>( this );
    }

    operator VkFormatProperties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkFormatProperties *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::FormatFeatureFlags const &,
               VULKAN_HPP_NAMESPACE::FormatFeatureFlags const &,
               VULKAN_HPP_NAMESPACE::FormatFeatureFlags const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( linearTilingFeatures, optimalTilingFeatures, bufferFeatures );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( FormatProperties const & ) const = default;
#else
    bool operator==( FormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
# if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
# else
      return ( linearTilingFeatures == rhs.linearTilingFeatures ) && ( optimalTilingFeatures == rhs.optimalTilingFeatures ) &&
             ( bufferFeatures == rhs.bufferFeatures );
# endif
    }

    bool operator!=( FormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::FormatFeatureFlags linearTilingFeatures  = {};
    VULKAN_HPP_NAMESPACE::FormatFeatureFlags optimalTilingFeatures = {};
    VULKAN_HPP_NAMESPACE::FormatFeatureFlags bufferFeatures        = {};
  };
struct FormatProperties2
|
|
{
|
|
using NativeType = VkFormatProperties2;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFormatProperties2;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR FormatProperties2( VULKAN_HPP_NAMESPACE::FormatProperties formatProperties_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, formatProperties( formatProperties_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR FormatProperties2( FormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
FormatProperties2( VkFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT : FormatProperties2( *reinterpret_cast<FormatProperties2 const *>( &rhs ) ) {}
|
|
|
|
FormatProperties2 & operator=( FormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
FormatProperties2 & operator=( VkFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FormatProperties2 const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
operator VkFormatProperties2 const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkFormatProperties2 *>( this );
|
|
}
|
|
|
|
operator VkFormatProperties2 &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkFormatProperties2 *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::FormatProperties const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, formatProperties );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( FormatProperties2 const & ) const = default;
|
|
#else
|
|
bool operator==( FormatProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( formatProperties == rhs.formatProperties );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( FormatProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFormatProperties2;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::FormatProperties formatProperties = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eFormatProperties2>
|
|
{
|
|
using Type = FormatProperties2;
|
|
};
|
|
|
|
using FormatProperties2KHR = FormatProperties2;
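
  // Illustrative usage sketch, not generated from the registry: FormatProperties2 is returned by
  // PhysicalDevice::getFormatProperties2 (Vulkan 1.1 / VK_KHR_get_physical_device_properties2),
  // with extension structures chained through its pNext. Assuming a valid vk::PhysicalDevice
  // `physicalDevice`:
  //
  //   vk::FormatProperties2 formatProperties2 = physicalDevice.getFormatProperties2( vk::Format::eR8G8B8A8Unorm );
  //   vk::FormatFeatureFlags optimalFeatures  = formatProperties2.formatProperties.optimalTilingFeatures;
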
struct FormatProperties3
|
|
{
|
|
using NativeType = VkFormatProperties3;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFormatProperties3;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR FormatProperties3( VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 linearTilingFeatures_ = {},
|
|
VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 optimalTilingFeatures_ = {},
|
|
VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 bufferFeatures_ = {},
|
|
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, linearTilingFeatures( linearTilingFeatures_ )
|
|
, optimalTilingFeatures( optimalTilingFeatures_ )
|
|
, bufferFeatures( bufferFeatures_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR FormatProperties3( FormatProperties3 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
FormatProperties3( VkFormatProperties3 const & rhs ) VULKAN_HPP_NOEXCEPT : FormatProperties3( *reinterpret_cast<FormatProperties3 const *>( &rhs ) ) {}
|
|
|
|
FormatProperties3 & operator=( FormatProperties3 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
FormatProperties3 & operator=( VkFormatProperties3 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FormatProperties3 const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
operator VkFormatProperties3 const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkFormatProperties3 *>( this );
|
|
}
|
|
|
|
operator VkFormatProperties3 &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkFormatProperties3 *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
void * const &,
|
|
VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 const &,
|
|
VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 const &,
|
|
VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, linearTilingFeatures, optimalTilingFeatures, bufferFeatures );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( FormatProperties3 const & ) const = default;
|
|
#else
|
|
bool operator==( FormatProperties3 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( linearTilingFeatures == rhs.linearTilingFeatures ) &&
|
|
( optimalTilingFeatures == rhs.optimalTilingFeatures ) && ( bufferFeatures == rhs.bufferFeatures );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( FormatProperties3 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFormatProperties3;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 linearTilingFeatures = {};
|
|
VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 optimalTilingFeatures = {};
|
|
VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 bufferFeatures = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eFormatProperties3>
|
|
{
|
|
using Type = FormatProperties3;
|
|
};
|
|
|
|
using FormatProperties3KHR = FormatProperties3;
|
|
|
|
struct FragmentShadingRateAttachmentInfoKHR
|
|
{
|
|
using NativeType = VkFragmentShadingRateAttachmentInfoKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFragmentShadingRateAttachmentInfoKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR FragmentShadingRateAttachmentInfoKHR( const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pFragmentShadingRateAttachment_ = {},
|
|
VULKAN_HPP_NAMESPACE::Extent2D shadingRateAttachmentTexelSize_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, pFragmentShadingRateAttachment( pFragmentShadingRateAttachment_ )
|
|
, shadingRateAttachmentTexelSize( shadingRateAttachmentTexelSize_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR FragmentShadingRateAttachmentInfoKHR( FragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
FragmentShadingRateAttachmentInfoKHR( VkFragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: FragmentShadingRateAttachmentInfoKHR( *reinterpret_cast<FragmentShadingRateAttachmentInfoKHR const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
FragmentShadingRateAttachmentInfoKHR & operator=( FragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
FragmentShadingRateAttachmentInfoKHR & operator=( VkFragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FragmentShadingRateAttachmentInfoKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 FragmentShadingRateAttachmentInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 FragmentShadingRateAttachmentInfoKHR &
|
|
setPFragmentShadingRateAttachment( const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pFragmentShadingRateAttachment_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pFragmentShadingRateAttachment = pFragmentShadingRateAttachment_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 FragmentShadingRateAttachmentInfoKHR &
|
|
setShadingRateAttachmentTexelSize( VULKAN_HPP_NAMESPACE::Extent2D const & shadingRateAttachmentTexelSize_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shadingRateAttachmentTexelSize = shadingRateAttachmentTexelSize_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkFragmentShadingRateAttachmentInfoKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkFragmentShadingRateAttachmentInfoKHR *>( this );
|
|
}
|
|
|
|
operator VkFragmentShadingRateAttachmentInfoKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkFragmentShadingRateAttachmentInfoKHR *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
const void * const &,
|
|
const VULKAN_HPP_NAMESPACE::AttachmentReference2 * const &,
|
|
VULKAN_HPP_NAMESPACE::Extent2D const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, pFragmentShadingRateAttachment, shadingRateAttachmentTexelSize );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( FragmentShadingRateAttachmentInfoKHR const & ) const = default;
|
|
#else
|
|
bool operator==( FragmentShadingRateAttachmentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pFragmentShadingRateAttachment == rhs.pFragmentShadingRateAttachment ) &&
|
|
( shadingRateAttachmentTexelSize == rhs.shadingRateAttachmentTexelSize );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( FragmentShadingRateAttachmentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFragmentShadingRateAttachmentInfoKHR;
|
|
const void * pNext = {};
|
|
const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pFragmentShadingRateAttachment = {};
|
|
VULKAN_HPP_NAMESPACE::Extent2D shadingRateAttachmentTexelSize = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eFragmentShadingRateAttachmentInfoKHR>
|
|
{
|
|
using Type = FragmentShadingRateAttachmentInfoKHR;
|
|
};
|
|
|
|
struct FramebufferAttachmentImageInfo
|
|
{
|
|
using NativeType = VkFramebufferAttachmentImageInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFramebufferAttachmentImageInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR FramebufferAttachmentImageInfo( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ = {},
|
|
VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {},
|
|
uint32_t width_ = {},
|
|
uint32_t height_ = {},
|
|
uint32_t layerCount_ = {},
|
|
uint32_t viewFormatCount_ = {},
|
|
const VULKAN_HPP_NAMESPACE::Format * pViewFormats_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, flags( flags_ )
|
|
, usage( usage_ )
|
|
, width( width_ )
|
|
, height( height_ )
|
|
, layerCount( layerCount_ )
|
|
, viewFormatCount( viewFormatCount_ )
|
|
, pViewFormats( pViewFormats_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR FramebufferAttachmentImageInfo( FramebufferAttachmentImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
FramebufferAttachmentImageInfo( VkFramebufferAttachmentImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: FramebufferAttachmentImageInfo( *reinterpret_cast<FramebufferAttachmentImageInfo const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
FramebufferAttachmentImageInfo( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_,
|
|
VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_,
|
|
uint32_t width_,
|
|
uint32_t height_,
|
|
uint32_t layerCount_,
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Format> const & viewFormats_,
|
|
const void * pNext_ = nullptr )
|
|
: pNext( pNext_ )
|
|
, flags( flags_ )
|
|
, usage( usage_ )
|
|
, width( width_ )
|
|
, height( height_ )
|
|
, layerCount( layerCount_ )
|
|
, viewFormatCount( static_cast<uint32_t>( viewFormats_.size() ) )
|
|
, pViewFormats( viewFormats_.data() )
|
|
{
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
FramebufferAttachmentImageInfo & operator=( FramebufferAttachmentImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
FramebufferAttachmentImageInfo & operator=( VkFramebufferAttachmentImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo & setFlags( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
usage = usage_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
width = width_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
height = height_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
layerCount = layerCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo & setViewFormatCount( uint32_t viewFormatCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
viewFormatCount = viewFormatCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo & setPViewFormats( const VULKAN_HPP_NAMESPACE::Format * pViewFormats_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pViewFormats = pViewFormats_;
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
FramebufferAttachmentImageInfo &
|
|
setViewFormats( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Format> const & viewFormats_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
viewFormatCount = static_cast<uint32_t>( viewFormats_.size() );
|
|
pViewFormats = viewFormats_.data();
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkFramebufferAttachmentImageInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkFramebufferAttachmentImageInfo *>( this );
|
|
}
|
|
|
|
operator VkFramebufferAttachmentImageInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkFramebufferAttachmentImageInfo *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
const void * const &,
|
|
VULKAN_HPP_NAMESPACE::ImageCreateFlags const &,
|
|
VULKAN_HPP_NAMESPACE::ImageUsageFlags const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
const VULKAN_HPP_NAMESPACE::Format * const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags, usage, width, height, layerCount, viewFormatCount, pViewFormats );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( FramebufferAttachmentImageInfo const & ) const = default;
|
|
#else
|
|
bool operator==( FramebufferAttachmentImageInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( usage == rhs.usage ) && ( width == rhs.width ) &&
|
|
( height == rhs.height ) && ( layerCount == rhs.layerCount ) && ( viewFormatCount == rhs.viewFormatCount ) && ( pViewFormats == rhs.pViewFormats );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( FramebufferAttachmentImageInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFramebufferAttachmentImageInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::ImageCreateFlags flags = {};
|
|
VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {};
|
|
uint32_t width = {};
|
|
uint32_t height = {};
|
|
uint32_t layerCount = {};
|
|
uint32_t viewFormatCount = {};
|
|
const VULKAN_HPP_NAMESPACE::Format * pViewFormats = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eFramebufferAttachmentImageInfo>
|
|
{
|
|
using Type = FramebufferAttachmentImageInfo;
|
|
};
|
|
|
|
using FramebufferAttachmentImageInfoKHR = FramebufferAttachmentImageInfo;
|
|
|
|
struct FramebufferAttachmentsCreateInfo
|
|
{
|
|
using NativeType = VkFramebufferAttachmentsCreateInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFramebufferAttachmentsCreateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR FramebufferAttachmentsCreateInfo( uint32_t attachmentImageInfoCount_ = {},
|
|
const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo * pAttachmentImageInfos_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, attachmentImageInfoCount( attachmentImageInfoCount_ )
|
|
, pAttachmentImageInfos( pAttachmentImageInfos_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR FramebufferAttachmentsCreateInfo( FramebufferAttachmentsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
FramebufferAttachmentsCreateInfo( VkFramebufferAttachmentsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: FramebufferAttachmentsCreateInfo( *reinterpret_cast<FramebufferAttachmentsCreateInfo const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
FramebufferAttachmentsCreateInfo(
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo> const & attachmentImageInfos_,
|
|
const void * pNext_ = nullptr )
|
|
: pNext( pNext_ )
|
|
, attachmentImageInfoCount( static_cast<uint32_t>( attachmentImageInfos_.size() ) )
|
|
, pAttachmentImageInfos( attachmentImageInfos_.data() )
|
|
{
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
FramebufferAttachmentsCreateInfo & operator=( FramebufferAttachmentsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
FramebufferAttachmentsCreateInfo & operator=( VkFramebufferAttachmentsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FramebufferAttachmentsCreateInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentsCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentsCreateInfo & setAttachmentImageInfoCount( uint32_t attachmentImageInfoCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
attachmentImageInfoCount = attachmentImageInfoCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentsCreateInfo &
|
|
setPAttachmentImageInfos( const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo * pAttachmentImageInfos_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pAttachmentImageInfos = pAttachmentImageInfos_;
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
FramebufferAttachmentsCreateInfo & setAttachmentImageInfos(
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo> const & attachmentImageInfos_ )
|
|
VULKAN_HPP_NOEXCEPT
|
|
{
|
|
attachmentImageInfoCount = static_cast<uint32_t>( attachmentImageInfos_.size() );
|
|
pAttachmentImageInfos = attachmentImageInfos_.data();
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkFramebufferAttachmentsCreateInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkFramebufferAttachmentsCreateInfo *>( this );
|
|
}
|
|
|
|
operator VkFramebufferAttachmentsCreateInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkFramebufferAttachmentsCreateInfo *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
const void * const &,
|
|
uint32_t const &,
|
|
const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo * const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, attachmentImageInfoCount, pAttachmentImageInfos );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( FramebufferAttachmentsCreateInfo const & ) const = default;
|
|
#else
|
|
bool operator==( FramebufferAttachmentsCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( attachmentImageInfoCount == rhs.attachmentImageInfoCount ) &&
|
|
( pAttachmentImageInfos == rhs.pAttachmentImageInfos );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( FramebufferAttachmentsCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFramebufferAttachmentsCreateInfo;
|
|
const void * pNext = {};
|
|
uint32_t attachmentImageInfoCount = {};
|
|
const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo * pAttachmentImageInfos = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eFramebufferAttachmentsCreateInfo>
|
|
{
|
|
using Type = FramebufferAttachmentsCreateInfo;
|
|
};
|
|
|
|
using FramebufferAttachmentsCreateInfoKHR = FramebufferAttachmentsCreateInfo;
|
|
|
|
struct FramebufferCreateInfo
|
|
{
|
|
using NativeType = VkFramebufferCreateInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFramebufferCreateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR FramebufferCreateInfo( VULKAN_HPP_NAMESPACE::FramebufferCreateFlags flags_ = {},
|
|
VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {},
|
|
uint32_t attachmentCount_ = {},
|
|
const VULKAN_HPP_NAMESPACE::ImageView * pAttachments_ = {},
|
|
uint32_t width_ = {},
|
|
uint32_t height_ = {},
|
|
uint32_t layers_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, flags( flags_ )
|
|
, renderPass( renderPass_ )
|
|
, attachmentCount( attachmentCount_ )
      , pAttachments( pAttachments_ )
      , width( width_ )
      , height( height_ )
      , layers( layers_ )
    {
    }

    VULKAN_HPP_CONSTEXPR FramebufferCreateInfo( FramebufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    FramebufferCreateInfo( VkFramebufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : FramebufferCreateInfo( *reinterpret_cast<FramebufferCreateInfo const *>( &rhs ) )
    {
    }

# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    FramebufferCreateInfo( VULKAN_HPP_NAMESPACE::FramebufferCreateFlags flags_,
                           VULKAN_HPP_NAMESPACE::RenderPass renderPass_,
                           VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageView> const & attachments_,
                           uint32_t width_ = {},
                           uint32_t height_ = {},
                           uint32_t layers_ = {},
                           const void * pNext_ = nullptr )
      : pNext( pNext_ )
      , flags( flags_ )
      , renderPass( renderPass_ )
      , attachmentCount( static_cast<uint32_t>( attachments_.size() ) )
      , pAttachments( attachments_.data() )
      , width( width_ )
      , height( height_ )
      , layers( layers_ )
    {
    }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    FramebufferCreateInfo & operator=( FramebufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    FramebufferCreateInfo & operator=( VkFramebufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FramebufferCreateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::FramebufferCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo & setRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT
    {
      renderPass = renderPass_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT
    {
      attachmentCount = attachmentCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo & setPAttachments( const VULKAN_HPP_NAMESPACE::ImageView * pAttachments_ ) VULKAN_HPP_NOEXCEPT
    {
      pAttachments = pAttachments_;
      return *this;
    }

# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    FramebufferCreateInfo &
      setAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageView> const & attachments_ ) VULKAN_HPP_NOEXCEPT
    {
      attachmentCount = static_cast<uint32_t>( attachments_.size() );
      pAttachments = attachments_.data();
      return *this;
    }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT
    {
      width = width_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT
    {
      height = height_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo & setLayers( uint32_t layers_ ) VULKAN_HPP_NOEXCEPT
    {
      layers = layers_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkFramebufferCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkFramebufferCreateInfo *>( this );
    }

    operator VkFramebufferCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkFramebufferCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
               const void * const &,
               VULKAN_HPP_NAMESPACE::FramebufferCreateFlags const &,
               VULKAN_HPP_NAMESPACE::RenderPass const &,
               uint32_t const &,
               const VULKAN_HPP_NAMESPACE::ImageView * const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, renderPass, attachmentCount, pAttachments, width, height, layers );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( FramebufferCreateInfo const & ) const = default;
#else
    bool operator==( FramebufferCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
# if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
# else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( renderPass == rhs.renderPass ) &&
             ( attachmentCount == rhs.attachmentCount ) && ( pAttachments == rhs.pAttachments ) && ( width == rhs.width ) && ( height == rhs.height ) &&
             ( layers == rhs.layers );
# endif
    }

    bool operator!=( FramebufferCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFramebufferCreateInfo;
    const void * pNext = {};
    VULKAN_HPP_NAMESPACE::FramebufferCreateFlags flags = {};
    VULKAN_HPP_NAMESPACE::RenderPass renderPass = {};
    uint32_t attachmentCount = {};
    const VULKAN_HPP_NAMESPACE::ImageView * pAttachments = {};
    uint32_t width = {};
    uint32_t height = {};
    uint32_t layers = {};
  };

  template <>
  struct CppType<StructureType, StructureType::eFramebufferCreateInfo>
  {
    using Type = FramebufferCreateInfo;
  };

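  // VertexInputBindingDescription mirrors VkVertexInputBindingDescription: it names a vertex buffer
  // binding slot, the byte stride between consecutive elements, and whether the data advances per
  // vertex or per instance.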
  struct VertexInputBindingDescription
  {
    using NativeType = VkVertexInputBindingDescription;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      VertexInputBindingDescription( uint32_t binding_ = {},
                                     uint32_t stride_ = {},
                                     VULKAN_HPP_NAMESPACE::VertexInputRate inputRate_ = VULKAN_HPP_NAMESPACE::VertexInputRate::eVertex ) VULKAN_HPP_NOEXCEPT
      : binding( binding_ )
      , stride( stride_ )
      , inputRate( inputRate_ )
    {
    }

    VULKAN_HPP_CONSTEXPR VertexInputBindingDescription( VertexInputBindingDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VertexInputBindingDescription( VkVertexInputBindingDescription const & rhs ) VULKAN_HPP_NOEXCEPT
      : VertexInputBindingDescription( *reinterpret_cast<VertexInputBindingDescription const *>( &rhs ) )
    {
    }

    VertexInputBindingDescription & operator=( VertexInputBindingDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VertexInputBindingDescription & operator=( VkVertexInputBindingDescription const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VertexInputBindingDescription const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT
    {
      binding = binding_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription & setStride( uint32_t stride_ ) VULKAN_HPP_NOEXCEPT
    {
      stride = stride_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription & setInputRate( VULKAN_HPP_NAMESPACE::VertexInputRate inputRate_ ) VULKAN_HPP_NOEXCEPT
    {
      inputRate = inputRate_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkVertexInputBindingDescription const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVertexInputBindingDescription *>( this );
    }

    operator VkVertexInputBindingDescription &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVertexInputBindingDescription *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::VertexInputRate const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( binding, stride, inputRate );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VertexInputBindingDescription const & ) const = default;
#else
    bool operator==( VertexInputBindingDescription const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
# if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
# else
      return ( binding == rhs.binding ) && ( stride == rhs.stride ) && ( inputRate == rhs.inputRate );
# endif
    }

    bool operator!=( VertexInputBindingDescription const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    uint32_t binding = {};
    uint32_t stride = {};
    VULKAN_HPP_NAMESPACE::VertexInputRate inputRate = VULKAN_HPP_NAMESPACE::VertexInputRate::eVertex;
  };

  struct VertexInputAttributeDescription
  {
    using NativeType = VkVertexInputAttributeDescription;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR VertexInputAttributeDescription( uint32_t location_ = {},
                                                          uint32_t binding_ = {},
                                                          VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
                                                          uint32_t offset_ = {} ) VULKAN_HPP_NOEXCEPT
      : location( location_ )
      , binding( binding_ )
      , format( format_ )
      , offset( offset_ )
    {
    }

    VULKAN_HPP_CONSTEXPR VertexInputAttributeDescription( VertexInputAttributeDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VertexInputAttributeDescription( VkVertexInputAttributeDescription const & rhs ) VULKAN_HPP_NOEXCEPT
      : VertexInputAttributeDescription( *reinterpret_cast<VertexInputAttributeDescription const *>( &rhs ) )
    {
    }

    VertexInputAttributeDescription & operator=( VertexInputAttributeDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VertexInputAttributeDescription & operator=( VkVertexInputAttributeDescription const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription & setLocation( uint32_t location_ ) VULKAN_HPP_NOEXCEPT
    {
      location = location_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT
    {
      binding = binding_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
    {
      format = format_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT
    {
      offset = offset_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkVertexInputAttributeDescription const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVertexInputAttributeDescription *>( this );
    }

    operator VkVertexInputAttributeDescription &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVertexInputAttributeDescription *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Format const &, uint32_t const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( location, binding, format, offset );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VertexInputAttributeDescription const & ) const = default;
#else
    bool operator==( VertexInputAttributeDescription const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
# if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
# else
      return ( location == rhs.location ) && ( binding == rhs.binding ) && ( format == rhs.format ) && ( offset == rhs.offset );
# endif
    }

    bool operator!=( VertexInputAttributeDescription const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    uint32_t location = {};
    uint32_t binding = {};
    VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
    uint32_t offset = {};
  };

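  // PipelineVertexInputStateCreateInfo mirrors VkPipelineVertexInputStateCreateInfo and bundles the
  // binding and attribute descriptions consumed by graphics pipeline creation. Illustrative sketch
  // only -- the names and values below are made-up example data, not part of this header; the
  // ArrayProxy constructor used is the enhanced-mode one, available when
  // VULKAN_HPP_DISABLE_ENHANCED_MODE is not defined:
  //
  //   vk::VertexInputBindingDescription binding( 0, 20 /* byte stride: 5 floats */, vk::VertexInputRate::eVertex );
  //   vk::VertexInputAttributeDescription attributes[] = {
  //     vk::VertexInputAttributeDescription( 0, 0, vk::Format::eR32G32B32Sfloat, 0 ),   // position
  //     vk::VertexInputAttributeDescription( 1, 0, vk::Format::eR32G32Sfloat, 12 ) };   // texcoord
  //   vk::PipelineVertexInputStateCreateInfo vertexInput( {}, binding, attributes );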
  struct PipelineVertexInputStateCreateInfo
  {
    using NativeType = VkPipelineVertexInputStateCreateInfo;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineVertexInputStateCreateInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PipelineVertexInputStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags flags_ = {},
                                                             uint32_t vertexBindingDescriptionCount_ = {},
                                                             const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription * pVertexBindingDescriptions_ = {},
                                                             uint32_t vertexAttributeDescriptionCount_ = {},
                                                             const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription * pVertexAttributeDescriptions_ = {},
                                                             const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , flags( flags_ )
      , vertexBindingDescriptionCount( vertexBindingDescriptionCount_ )
      , pVertexBindingDescriptions( pVertexBindingDescriptions_ )
      , vertexAttributeDescriptionCount( vertexAttributeDescriptionCount_ )
      , pVertexAttributeDescriptions( pVertexAttributeDescriptions_ )
    {
    }

    VULKAN_HPP_CONSTEXPR PipelineVertexInputStateCreateInfo( PipelineVertexInputStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineVertexInputStateCreateInfo( VkPipelineVertexInputStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineVertexInputStateCreateInfo( *reinterpret_cast<PipelineVertexInputStateCreateInfo const *>( &rhs ) )
    {
    }

# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    PipelineVertexInputStateCreateInfo(
      VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags flags_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription> const & vertexBindingDescriptions_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription> const & vertexAttributeDescriptions_ = {},
      const void * pNext_ = nullptr )
      : pNext( pNext_ )
      , flags( flags_ )
      , vertexBindingDescriptionCount( static_cast<uint32_t>( vertexBindingDescriptions_.size() ) )
      , pVertexBindingDescriptions( vertexBindingDescriptions_.data() )
      , vertexAttributeDescriptionCount( static_cast<uint32_t>( vertexAttributeDescriptions_.size() ) )
      , pVertexAttributeDescriptions( vertexAttributeDescriptions_.data() )
    {
    }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    PipelineVertexInputStateCreateInfo & operator=( PipelineVertexInputStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PipelineVertexInputStateCreateInfo & operator=( VkPipelineVertexInputStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputStateCreateInfo &
      setFlags( VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputStateCreateInfo & setVertexBindingDescriptionCount( uint32_t vertexBindingDescriptionCount_ ) VULKAN_HPP_NOEXCEPT
    {
      vertexBindingDescriptionCount = vertexBindingDescriptionCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputStateCreateInfo &
      setPVertexBindingDescriptions( const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription * pVertexBindingDescriptions_ ) VULKAN_HPP_NOEXCEPT
    {
      pVertexBindingDescriptions = pVertexBindingDescriptions_;
      return *this;
    }

# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    PipelineVertexInputStateCreateInfo & setVertexBindingDescriptions(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription> const & vertexBindingDescriptions_ )
      VULKAN_HPP_NOEXCEPT
    {
      vertexBindingDescriptionCount = static_cast<uint32_t>( vertexBindingDescriptions_.size() );
      pVertexBindingDescriptions = vertexBindingDescriptions_.data();
      return *this;
    }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputStateCreateInfo &
      setVertexAttributeDescriptionCount( uint32_t vertexAttributeDescriptionCount_ ) VULKAN_HPP_NOEXCEPT
    {
      vertexAttributeDescriptionCount = vertexAttributeDescriptionCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputStateCreateInfo &
      setPVertexAttributeDescriptions( const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription * pVertexAttributeDescriptions_ ) VULKAN_HPP_NOEXCEPT
    {
      pVertexAttributeDescriptions = pVertexAttributeDescriptions_;
      return *this;
    }

# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    PipelineVertexInputStateCreateInfo & setVertexAttributeDescriptions(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription> const & vertexAttributeDescriptions_ )
      VULKAN_HPP_NOEXCEPT
    {
      vertexAttributeDescriptionCount = static_cast<uint32_t>( vertexAttributeDescriptions_.size() );
      pVertexAttributeDescriptions = vertexAttributeDescriptions_.data();
      return *this;
    }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkPipelineVertexInputStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineVertexInputStateCreateInfo *>( this );
    }

    operator VkPipelineVertexInputStateCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineVertexInputStateCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
               const void * const &,
               VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags const &,
               uint32_t const &,
               const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription * const &,
               uint32_t const &,
               const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription * const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie(
        sType, pNext, flags, vertexBindingDescriptionCount, pVertexBindingDescriptions, vertexAttributeDescriptionCount, pVertexAttributeDescriptions );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineVertexInputStateCreateInfo const & ) const = default;
#else
    bool operator==( PipelineVertexInputStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
# if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
# else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
             ( vertexBindingDescriptionCount == rhs.vertexBindingDescriptionCount ) && ( pVertexBindingDescriptions == rhs.pVertexBindingDescriptions ) &&
             ( vertexAttributeDescriptionCount == rhs.vertexAttributeDescriptionCount ) && ( pVertexAttributeDescriptions == rhs.pVertexAttributeDescriptions );
# endif
    }

    bool operator!=( PipelineVertexInputStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineVertexInputStateCreateInfo;
    const void * pNext = {};
    VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags flags = {};
    uint32_t vertexBindingDescriptionCount = {};
    const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription * pVertexBindingDescriptions = {};
    uint32_t vertexAttributeDescriptionCount = {};
    const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription * pVertexAttributeDescriptions = {};
  };

  template <>
  struct CppType<StructureType, StructureType::ePipelineVertexInputStateCreateInfo>
  {
    using Type = PipelineVertexInputStateCreateInfo;
  };

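  // PipelineInputAssemblyStateCreateInfo mirrors VkPipelineInputAssemblyStateCreateInfo: it selects the
  // primitive topology (point/line/triangle lists or strips) and whether primitive restart is enabled.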
  struct PipelineInputAssemblyStateCreateInfo
  {
    using NativeType = VkPipelineInputAssemblyStateCreateInfo;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineInputAssemblyStateCreateInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      PipelineInputAssemblyStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateFlags flags_ = {},
                                            VULKAN_HPP_NAMESPACE::PrimitiveTopology topology_ = VULKAN_HPP_NAMESPACE::PrimitiveTopology::ePointList,
                                            VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable_ = {},
                                            const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , flags( flags_ )
      , topology( topology_ )
      , primitiveRestartEnable( primitiveRestartEnable_ )
    {
    }

    VULKAN_HPP_CONSTEXPR PipelineInputAssemblyStateCreateInfo( PipelineInputAssemblyStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineInputAssemblyStateCreateInfo( VkPipelineInputAssemblyStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineInputAssemblyStateCreateInfo( *reinterpret_cast<PipelineInputAssemblyStateCreateInfo const *>( &rhs ) )
    {
    }

    PipelineInputAssemblyStateCreateInfo & operator=( PipelineInputAssemblyStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PipelineInputAssemblyStateCreateInfo & operator=( VkPipelineInputAssemblyStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PipelineInputAssemblyStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineInputAssemblyStateCreateInfo &
      setFlags( VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineInputAssemblyStateCreateInfo & setTopology( VULKAN_HPP_NAMESPACE::PrimitiveTopology topology_ ) VULKAN_HPP_NOEXCEPT
    {
      topology = topology_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineInputAssemblyStateCreateInfo &
      setPrimitiveRestartEnable( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      primitiveRestartEnable = primitiveRestartEnable_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkPipelineInputAssemblyStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineInputAssemblyStateCreateInfo *>( this );
    }

    operator VkPipelineInputAssemblyStateCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineInputAssemblyStateCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
               const void * const &,
               VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateFlags const &,
               VULKAN_HPP_NAMESPACE::PrimitiveTopology const &,
               VULKAN_HPP_NAMESPACE::Bool32 const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, topology, primitiveRestartEnable );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineInputAssemblyStateCreateInfo const & ) const = default;
#else
    bool operator==( PipelineInputAssemblyStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
# if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
# else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( topology == rhs.topology ) &&
             ( primitiveRestartEnable == rhs.primitiveRestartEnable );
# endif
    }

    bool operator!=( PipelineInputAssemblyStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineInputAssemblyStateCreateInfo;
    const void * pNext = {};
    VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateFlags flags = {};
    VULKAN_HPP_NAMESPACE::PrimitiveTopology topology = VULKAN_HPP_NAMESPACE::PrimitiveTopology::ePointList;
    VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable = {};
  };

  template <>
  struct CppType<StructureType, StructureType::ePipelineInputAssemblyStateCreateInfo>
  {
    using Type = PipelineInputAssemblyStateCreateInfo;
  };

  struct PipelineTessellationStateCreateInfo
  {
    using NativeType = VkPipelineTessellationStateCreateInfo;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineTessellationStateCreateInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PipelineTessellationStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateFlags flags_ = {},
                                                              uint32_t patchControlPoints_ = {},
                                                              const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , flags( flags_ )
      , patchControlPoints( patchControlPoints_ )
    {
    }

    VULKAN_HPP_CONSTEXPR PipelineTessellationStateCreateInfo( PipelineTessellationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineTessellationStateCreateInfo( VkPipelineTessellationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineTessellationStateCreateInfo( *reinterpret_cast<PipelineTessellationStateCreateInfo const *>( &rhs ) )
    {
    }

    PipelineTessellationStateCreateInfo & operator=( PipelineTessellationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PipelineTessellationStateCreateInfo & operator=( VkPipelineTessellationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PipelineTessellationStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineTessellationStateCreateInfo &
      setFlags( VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineTessellationStateCreateInfo & setPatchControlPoints( uint32_t patchControlPoints_ ) VULKAN_HPP_NOEXCEPT
    {
      patchControlPoints = patchControlPoints_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkPipelineTessellationStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineTessellationStateCreateInfo *>( this );
    }

    operator VkPipelineTessellationStateCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineTessellationStateCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
               const void * const &,
               VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateFlags const &,
               uint32_t const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, patchControlPoints );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineTessellationStateCreateInfo const & ) const = default;
#else
    bool operator==( PipelineTessellationStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
# if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
# else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( patchControlPoints == rhs.patchControlPoints );
# endif
    }

    bool operator!=( PipelineTessellationStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineTessellationStateCreateInfo;
    const void * pNext = {};
    VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateFlags flags = {};
    uint32_t patchControlPoints = {};
  };

  template <>
  struct CppType<StructureType, StructureType::ePipelineTessellationStateCreateInfo>
  {
    using Type = PipelineTessellationStateCreateInfo;
  };

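  // Viewport mirrors VkViewport: an x/y origin and width/height in framebuffer coordinates, plus the
  // minDepth/maxDepth range that normalized device depth is mapped to.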
  struct Viewport
  {
    using NativeType = VkViewport;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      Viewport( float x_ = {}, float y_ = {}, float width_ = {}, float height_ = {}, float minDepth_ = {}, float maxDepth_ = {} ) VULKAN_HPP_NOEXCEPT
      : x( x_ )
      , y( y_ )
      , width( width_ )
      , height( height_ )
      , minDepth( minDepth_ )
      , maxDepth( maxDepth_ )
    {
    }

    VULKAN_HPP_CONSTEXPR Viewport( Viewport const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    Viewport( VkViewport const & rhs ) VULKAN_HPP_NOEXCEPT : Viewport( *reinterpret_cast<Viewport const *>( &rhs ) ) {}

    Viewport & operator=( Viewport const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    Viewport & operator=( VkViewport const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Viewport const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 Viewport & setX( float x_ ) VULKAN_HPP_NOEXCEPT
    {
      x = x_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 Viewport & setY( float y_ ) VULKAN_HPP_NOEXCEPT
    {
      y = y_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 Viewport & setWidth( float width_ ) VULKAN_HPP_NOEXCEPT
    {
      width = width_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 Viewport & setHeight( float height_ ) VULKAN_HPP_NOEXCEPT
    {
      height = height_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 Viewport & setMinDepth( float minDepth_ ) VULKAN_HPP_NOEXCEPT
    {
      minDepth = minDepth_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 Viewport & setMaxDepth( float maxDepth_ ) VULKAN_HPP_NOEXCEPT
    {
      maxDepth = maxDepth_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkViewport const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkViewport *>( this );
    }

    operator VkViewport &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkViewport *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<float const &, float const &, float const &, float const &, float const &, float const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( x, y, width, height, minDepth, maxDepth );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( Viewport const & ) const = default;
#else
    bool operator==( Viewport const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
# if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
# else
      return ( x == rhs.x ) && ( y == rhs.y ) && ( width == rhs.width ) && ( height == rhs.height ) && ( minDepth == rhs.minDepth ) &&
             ( maxDepth == rhs.maxDepth );
# endif
    }

    bool operator!=( Viewport const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    float x = {};
    float y = {};
    float width = {};
    float height = {};
    float minDepth = {};
    float maxDepth = {};
  };

  struct PipelineViewportStateCreateInfo
  {
    using NativeType = VkPipelineViewportStateCreateInfo;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportStateCreateInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PipelineViewportStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags flags_ = {},
                                                          uint32_t viewportCount_ = {},
                                                          const VULKAN_HPP_NAMESPACE::Viewport * pViewports_ = {},
                                                          uint32_t scissorCount_ = {},
                                                          const VULKAN_HPP_NAMESPACE::Rect2D * pScissors_ = {},
                                                          const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , flags( flags_ )
      , viewportCount( viewportCount_ )
      , pViewports( pViewports_ )
      , scissorCount( scissorCount_ )
      , pScissors( pScissors_ )
    {
    }

    VULKAN_HPP_CONSTEXPR PipelineViewportStateCreateInfo( PipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineViewportStateCreateInfo( VkPipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineViewportStateCreateInfo( *reinterpret_cast<PipelineViewportStateCreateInfo const *>( &rhs ) )
    {
    }

# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    PipelineViewportStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags flags_,
                                     VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports_,
                                     VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors_ = {},
                                     const void * pNext_ = nullptr )
      : pNext( pNext_ )
      , flags( flags_ )
      , viewportCount( static_cast<uint32_t>( viewports_.size() ) )
      , pViewports( viewports_.data() )
      , scissorCount( static_cast<uint32_t>( scissors_.size() ) )
      , pScissors( scissors_.data() )
    {
    }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    PipelineViewportStateCreateInfo & operator=( PipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PipelineViewportStateCreateInfo & operator=( VkPipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PipelineViewportStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineViewportStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineViewportStateCreateInfo & setViewportCount( uint32_t viewportCount_ ) VULKAN_HPP_NOEXCEPT
    {
      viewportCount = viewportCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineViewportStateCreateInfo & setPViewports( const VULKAN_HPP_NAMESPACE::Viewport * pViewports_ ) VULKAN_HPP_NOEXCEPT
    {
      pViewports = pViewports_;
      return *this;
    }

# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    PipelineViewportStateCreateInfo &
      setViewports( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports_ ) VULKAN_HPP_NOEXCEPT
    {
      viewportCount = static_cast<uint32_t>( viewports_.size() );
      pViewports = viewports_.data();
      return *this;
    }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 PipelineViewportStateCreateInfo & setScissorCount( uint32_t scissorCount_ ) VULKAN_HPP_NOEXCEPT
    {
      scissorCount = scissorCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineViewportStateCreateInfo & setPScissors( const VULKAN_HPP_NAMESPACE::Rect2D * pScissors_ ) VULKAN_HPP_NOEXCEPT
    {
      pScissors = pScissors_;
      return *this;
    }

# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    PipelineViewportStateCreateInfo &
      setScissors( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors_ ) VULKAN_HPP_NOEXCEPT
    {
      scissorCount = static_cast<uint32_t>( scissors_.size() );
      pScissors = scissors_.data();
      return *this;
    }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkPipelineViewportStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineViewportStateCreateInfo *>( this );
    }

    operator VkPipelineViewportStateCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineViewportStateCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
               const void * const &,
               VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags const &,
               uint32_t const &,
               const VULKAN_HPP_NAMESPACE::Viewport * const &,
               uint32_t const &,
               const VULKAN_HPP_NAMESPACE::Rect2D * const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, viewportCount, pViewports, scissorCount, pScissors );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineViewportStateCreateInfo const & ) const = default;
#else
    bool operator==( PipelineViewportStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
# if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
# else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( viewportCount == rhs.viewportCount ) &&
             ( pViewports == rhs.pViewports ) && ( scissorCount == rhs.scissorCount ) && ( pScissors == rhs.pScissors );
# endif
    }

    bool operator!=( PipelineViewportStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportStateCreateInfo;
    const void * pNext = {};
    VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags flags = {};
    uint32_t viewportCount = {};
    const VULKAN_HPP_NAMESPACE::Viewport * pViewports = {};
    uint32_t scissorCount = {};
    const VULKAN_HPP_NAMESPACE::Rect2D * pScissors = {};
  };

  template <>
  struct CppType<StructureType, StructureType::ePipelineViewportStateCreateInfo>
  {
    using Type = PipelineViewportStateCreateInfo;
  };

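  // PipelineRasterizationStateCreateInfo mirrors VkPipelineRasterizationStateCreateInfo (polygon mode,
  // culling, front-face winding, depth bias, line width). Note that all scalar members below are
  // value-initialized, so lineWidth defaults to 0; a typical pipeline sets it to 1.0f.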
  struct PipelineRasterizationStateCreateInfo
  {
    using NativeType = VkPipelineRasterizationStateCreateInfo;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRasterizationStateCreateInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PipelineRasterizationStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateFlags flags_ = {},
                                                               VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable_ = {},
                                                               VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable_ = {},
                                                               VULKAN_HPP_NAMESPACE::PolygonMode polygonMode_ = VULKAN_HPP_NAMESPACE::PolygonMode::eFill,
                                                               VULKAN_HPP_NAMESPACE::CullModeFlags cullMode_ = {},
                                                               VULKAN_HPP_NAMESPACE::FrontFace frontFace_ = VULKAN_HPP_NAMESPACE::FrontFace::eCounterClockwise,
                                                               VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable_ = {},
                                                               float depthBiasConstantFactor_ = {},
                                                               float depthBiasClamp_ = {},
                                                               float depthBiasSlopeFactor_ = {},
                                                               float lineWidth_ = {},
                                                               const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , flags( flags_ )
      , depthClampEnable( depthClampEnable_ )
      , rasterizerDiscardEnable( rasterizerDiscardEnable_ )
      , polygonMode( polygonMode_ )
      , cullMode( cullMode_ )
      , frontFace( frontFace_ )
      , depthBiasEnable( depthBiasEnable_ )
      , depthBiasConstantFactor( depthBiasConstantFactor_ )
      , depthBiasClamp( depthBiasClamp_ )
      , depthBiasSlopeFactor( depthBiasSlopeFactor_ )
      , lineWidth( lineWidth_ )
    {
    }

    VULKAN_HPP_CONSTEXPR PipelineRasterizationStateCreateInfo( PipelineRasterizationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineRasterizationStateCreateInfo( VkPipelineRasterizationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineRasterizationStateCreateInfo( *reinterpret_cast<PipelineRasterizationStateCreateInfo const *>( &rhs ) )
    {
    }

    PipelineRasterizationStateCreateInfo & operator=( PipelineRasterizationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PipelineRasterizationStateCreateInfo & operator=( VkPipelineRasterizationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo &
      setFlags( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setDepthClampEnable( VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      depthClampEnable = depthClampEnable_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo &
      setRasterizerDiscardEnable( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      rasterizerDiscardEnable = rasterizerDiscardEnable_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setPolygonMode( VULKAN_HPP_NAMESPACE::PolygonMode polygonMode_ ) VULKAN_HPP_NOEXCEPT
    {
      polygonMode = polygonMode_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setCullMode( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode_ ) VULKAN_HPP_NOEXCEPT
    {
      cullMode = cullMode_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setFrontFace( VULKAN_HPP_NAMESPACE::FrontFace frontFace_ ) VULKAN_HPP_NOEXCEPT
    {
      frontFace = frontFace_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setDepthBiasEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      depthBiasEnable = depthBiasEnable_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setDepthBiasConstantFactor( float depthBiasConstantFactor_ ) VULKAN_HPP_NOEXCEPT
    {
      depthBiasConstantFactor = depthBiasConstantFactor_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setDepthBiasClamp( float depthBiasClamp_ ) VULKAN_HPP_NOEXCEPT
    {
      depthBiasClamp = depthBiasClamp_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setDepthBiasSlopeFactor( float depthBiasSlopeFactor_ ) VULKAN_HPP_NOEXCEPT
    {
      depthBiasSlopeFactor = depthBiasSlopeFactor_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setLineWidth( float lineWidth_ ) VULKAN_HPP_NOEXCEPT
    {
      lineWidth = lineWidth_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkPipelineRasterizationStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineRasterizationStateCreateInfo *>( this );
    }

    operator VkPipelineRasterizationStateCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineRasterizationStateCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
               const void * const &,
               VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateFlags const &,
               VULKAN_HPP_NAMESPACE::Bool32 const &,
               VULKAN_HPP_NAMESPACE::Bool32 const &,
               VULKAN_HPP_NAMESPACE::PolygonMode const &,
               VULKAN_HPP_NAMESPACE::CullModeFlags const &,
               VULKAN_HPP_NAMESPACE::FrontFace const &,
               VULKAN_HPP_NAMESPACE::Bool32 const &,
               float const &,
               float const &,
               float const &,
               float const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       flags,
                       depthClampEnable,
                       rasterizerDiscardEnable,
                       polygonMode,
                       cullMode,
                       frontFace,
                       depthBiasEnable,
                       depthBiasConstantFactor,
                       depthBiasClamp,
                       depthBiasSlopeFactor,
                       lineWidth );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineRasterizationStateCreateInfo const & ) const = default;
#else
    bool operator==( PipelineRasterizationStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
# if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
# else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( depthClampEnable == rhs.depthClampEnable ) &&
             ( rasterizerDiscardEnable == rhs.rasterizerDiscardEnable ) && ( polygonMode == rhs.polygonMode ) && ( cullMode == rhs.cullMode ) &&
             ( frontFace == rhs.frontFace ) && ( depthBiasEnable == rhs.depthBiasEnable ) && ( depthBiasConstantFactor == rhs.depthBiasConstantFactor ) &&
             ( depthBiasClamp == rhs.depthBiasClamp ) && ( depthBiasSlopeFactor == rhs.depthBiasSlopeFactor ) && ( lineWidth == rhs.lineWidth );
# endif
    }

    bool operator!=( PipelineRasterizationStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationStateCreateInfo;
    const void * pNext = {};
    VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateFlags flags = {};
    VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable = {};
    VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable = {};
    VULKAN_HPP_NAMESPACE::PolygonMode polygonMode = VULKAN_HPP_NAMESPACE::PolygonMode::eFill;
    VULKAN_HPP_NAMESPACE::CullModeFlags cullMode = {};
    VULKAN_HPP_NAMESPACE::FrontFace frontFace = VULKAN_HPP_NAMESPACE::FrontFace::eCounterClockwise;
    VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable = {};
    float depthBiasConstantFactor = {};
    float depthBiasClamp = {};
    float depthBiasSlopeFactor = {};
    float lineWidth = {};
  };

  template <>
  struct CppType<StructureType, StructureType::ePipelineRasterizationStateCreateInfo>
  {
    using Type = PipelineRasterizationStateCreateInfo;
  };

  struct PipelineMultisampleStateCreateInfo
  {
    using NativeType = VkPipelineMultisampleStateCreateInfo;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineMultisampleStateCreateInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      PipelineMultisampleStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateFlags flags_ = {},
                                          VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1,
                                          VULKAN_HPP_NAMESPACE::Bool32 sampleShadingEnable_ = {},
                                          float minSampleShading_ = {},
                                          const VULKAN_HPP_NAMESPACE::SampleMask * pSampleMask_ = {},
                                          VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable_ = {},
                                          VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable_ = {},
                                          const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , flags( flags_ )
      , rasterizationSamples( rasterizationSamples_ )
      , sampleShadingEnable( sampleShadingEnable_ )
      , minSampleShading( minSampleShading_ )
      , pSampleMask( pSampleMask_ )
      , alphaToCoverageEnable( alphaToCoverageEnable_ )
      , alphaToOneEnable( alphaToOneEnable_ )
    {
    }

    VULKAN_HPP_CONSTEXPR PipelineMultisampleStateCreateInfo( PipelineMultisampleStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineMultisampleStateCreateInfo( VkPipelineMultisampleStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineMultisampleStateCreateInfo( *reinterpret_cast<PipelineMultisampleStateCreateInfo const *>( &rhs ) )
    {
    }

    PipelineMultisampleStateCreateInfo & operator=( PipelineMultisampleStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PipelineMultisampleStateCreateInfo & operator=( VkPipelineMultisampleStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo &
      setFlags( VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo &
      setRasterizationSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ ) VULKAN_HPP_NOEXCEPT
    {
      rasterizationSamples = rasterizationSamples_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo & setSampleShadingEnable( VULKAN_HPP_NAMESPACE::Bool32 sampleShadingEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      sampleShadingEnable = sampleShadingEnable_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo & setMinSampleShading( float minSampleShading_ ) VULKAN_HPP_NOEXCEPT
    {
      minSampleShading = minSampleShading_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo & setPSampleMask( const VULKAN_HPP_NAMESPACE::SampleMask * pSampleMask_ ) VULKAN_HPP_NOEXCEPT
    {
      pSampleMask = pSampleMask_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo &
      setAlphaToCoverageEnable( VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      alphaToCoverageEnable = alphaToCoverageEnable_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo & setAlphaToOneEnable( VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      alphaToOneEnable = alphaToOneEnable_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkPipelineMultisampleStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineMultisampleStateCreateInfo *>( this );
    }

    operator VkPipelineMultisampleStateCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineMultisampleStateCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
               const void * const &,
               VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateFlags const &,
               VULKAN_HPP_NAMESPACE::SampleCountFlagBits const &,
               VULKAN_HPP_NAMESPACE::Bool32 const &,
               float const &,
               const VULKAN_HPP_NAMESPACE::SampleMask * const &,
               VULKAN_HPP_NAMESPACE::Bool32 const &,
               VULKAN_HPP_NAMESPACE::Bool32 const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, rasterizationSamples, sampleShadingEnable, minSampleShading, pSampleMask, alphaToCoverageEnable, alphaToOneEnable );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineMultisampleStateCreateInfo const & ) const = default;
#else
    bool operator==( PipelineMultisampleStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
# if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
# else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( rasterizationSamples == rhs.rasterizationSamples ) &&
             ( sampleShadingEnable == rhs.sampleShadingEnable ) && ( minSampleShading == rhs.minSampleShading ) && ( pSampleMask == rhs.pSampleMask ) &&
             ( alphaToCoverageEnable == rhs.alphaToCoverageEnable ) && ( alphaToOneEnable == rhs.alphaToOneEnable );
# endif
    }

    bool operator!=( PipelineMultisampleStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineMultisampleStateCreateInfo;
    const void * pNext = {};
    VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateFlags flags = {};
    VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
    VULKAN_HPP_NAMESPACE::Bool32 sampleShadingEnable = {};
    float minSampleShading = {};
    const VULKAN_HPP_NAMESPACE::SampleMask * pSampleMask = {};
    VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable = {};
    VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable = {};
  };

  template <>
  struct CppType<StructureType, StructureType::ePipelineMultisampleStateCreateInfo>
  {
    using Type = PipelineMultisampleStateCreateInfo;
  };

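  // StencilOpState mirrors VkStencilOpState and is used twice (once for front-facing and once for
  // back-facing primitives) inside PipelineDepthStencilStateCreateInfo below.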
  struct StencilOpState
  {
    using NativeType = VkStencilOpState;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR StencilOpState( VULKAN_HPP_NAMESPACE::StencilOp failOp_ = VULKAN_HPP_NAMESPACE::StencilOp::eKeep,
                                         VULKAN_HPP_NAMESPACE::StencilOp passOp_ = VULKAN_HPP_NAMESPACE::StencilOp::eKeep,
                                         VULKAN_HPP_NAMESPACE::StencilOp depthFailOp_ = VULKAN_HPP_NAMESPACE::StencilOp::eKeep,
                                         VULKAN_HPP_NAMESPACE::CompareOp compareOp_ = VULKAN_HPP_NAMESPACE::CompareOp::eNever,
                                         uint32_t compareMask_ = {},
                                         uint32_t writeMask_ = {},
                                         uint32_t reference_ = {} ) VULKAN_HPP_NOEXCEPT
      : failOp( failOp_ )
      , passOp( passOp_ )
      , depthFailOp( depthFailOp_ )
      , compareOp( compareOp_ )
      , compareMask( compareMask_ )
      , writeMask( writeMask_ )
      , reference( reference_ )
    {
    }

    VULKAN_HPP_CONSTEXPR StencilOpState( StencilOpState const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    StencilOpState( VkStencilOpState const & rhs ) VULKAN_HPP_NOEXCEPT : StencilOpState( *reinterpret_cast<StencilOpState const *>( &rhs ) ) {}

    StencilOpState & operator=( StencilOpState const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    StencilOpState & operator=( VkStencilOpState const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::StencilOpState const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 StencilOpState & setFailOp( VULKAN_HPP_NAMESPACE::StencilOp failOp_ ) VULKAN_HPP_NOEXCEPT
    {
      failOp = failOp_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 StencilOpState & setPassOp( VULKAN_HPP_NAMESPACE::StencilOp passOp_ ) VULKAN_HPP_NOEXCEPT
    {
      passOp = passOp_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 StencilOpState & setDepthFailOp( VULKAN_HPP_NAMESPACE::StencilOp depthFailOp_ ) VULKAN_HPP_NOEXCEPT
    {
      depthFailOp = depthFailOp_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 StencilOpState & setCompareOp( VULKAN_HPP_NAMESPACE::CompareOp compareOp_ ) VULKAN_HPP_NOEXCEPT
    {
      compareOp = compareOp_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 StencilOpState & setCompareMask( uint32_t compareMask_ ) VULKAN_HPP_NOEXCEPT
    {
      compareMask = compareMask_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 StencilOpState & setWriteMask( uint32_t writeMask_ ) VULKAN_HPP_NOEXCEPT
    {
      writeMask = writeMask_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 StencilOpState & setReference( uint32_t reference_ ) VULKAN_HPP_NOEXCEPT
    {
      reference = reference_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkStencilOpState const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkStencilOpState *>( this );
    }

    operator VkStencilOpState &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkStencilOpState *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::StencilOp const &,
               VULKAN_HPP_NAMESPACE::StencilOp const &,
               VULKAN_HPP_NAMESPACE::StencilOp const &,
               VULKAN_HPP_NAMESPACE::CompareOp const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( failOp, passOp, depthFailOp, compareOp, compareMask, writeMask, reference );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( StencilOpState const & ) const = default;
#else
    bool operator==( StencilOpState const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
# if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
# else
      return ( failOp == rhs.failOp ) && ( passOp == rhs.passOp ) && ( depthFailOp == rhs.depthFailOp ) && ( compareOp == rhs.compareOp ) &&
             ( compareMask == rhs.compareMask ) && ( writeMask == rhs.writeMask ) && ( reference == rhs.reference );
# endif
    }

    bool operator!=( StencilOpState const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StencilOp failOp = VULKAN_HPP_NAMESPACE::StencilOp::eKeep;
    VULKAN_HPP_NAMESPACE::StencilOp passOp = VULKAN_HPP_NAMESPACE::StencilOp::eKeep;
    VULKAN_HPP_NAMESPACE::StencilOp depthFailOp = VULKAN_HPP_NAMESPACE::StencilOp::eKeep;
    VULKAN_HPP_NAMESPACE::CompareOp compareOp = VULKAN_HPP_NAMESPACE::CompareOp::eNever;
    uint32_t compareMask = {};
    uint32_t writeMask = {};
    uint32_t reference = {};
  };

  struct PipelineDepthStencilStateCreateInfo
  {
    using NativeType = VkPipelineDepthStencilStateCreateInfo;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineDepthStencilStateCreateInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PipelineDepthStencilStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateFlags flags_ = {},
                                                              VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable_ = {},
                                                              VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable_ = {},
                                                              VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp_ = VULKAN_HPP_NAMESPACE::CompareOp::eNever,
                                                              VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable_ = {},
                                                              VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable_ = {},
                                                              VULKAN_HPP_NAMESPACE::StencilOpState front_ = {},
                                                              VULKAN_HPP_NAMESPACE::StencilOpState back_ = {},
                                                              float minDepthBounds_ = {},
                                                              float maxDepthBounds_ = {},
                                                              const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , flags( flags_ )
      , depthTestEnable( depthTestEnable_ )
      , depthWriteEnable( depthWriteEnable_ )
      , depthCompareOp( depthCompareOp_ )
      , depthBoundsTestEnable( depthBoundsTestEnable_ )
      , stencilTestEnable( stencilTestEnable_ )
      , front( front_ )
      , back( back_ )
      , minDepthBounds( minDepthBounds_ )
      , maxDepthBounds( maxDepthBounds_ )
    {
    }

    VULKAN_HPP_CONSTEXPR PipelineDepthStencilStateCreateInfo( PipelineDepthStencilStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineDepthStencilStateCreateInfo( VkPipelineDepthStencilStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineDepthStencilStateCreateInfo( *reinterpret_cast<PipelineDepthStencilStateCreateInfo const *>( &rhs ) )
    {
    }

    PipelineDepthStencilStateCreateInfo & operator=( PipelineDepthStencilStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PipelineDepthStencilStateCreateInfo & operator=( VkPipelineDepthStencilStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo &
      setFlags( VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setDepthTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      depthTestEnable = depthTestEnable_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setDepthWriteEnable( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      depthWriteEnable = depthWriteEnable_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setDepthCompareOp( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp_ ) VULKAN_HPP_NOEXCEPT
    {
      depthCompareOp = depthCompareOp_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo &
      setDepthBoundsTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      depthBoundsTestEnable = depthBoundsTestEnable_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setStencilTestEnable( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      stencilTestEnable = stencilTestEnable_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setFront( VULKAN_HPP_NAMESPACE::StencilOpState const & front_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
front = front_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setBack( VULKAN_HPP_NAMESPACE::StencilOpState const & back_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
back = back_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setMinDepthBounds( float minDepthBounds_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
minDepthBounds = minDepthBounds_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setMaxDepthBounds( float maxDepthBounds_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
maxDepthBounds = maxDepthBounds_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkPipelineDepthStencilStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPipelineDepthStencilStateCreateInfo *>( this );
|
|
}
|
|
|
|
operator VkPipelineDepthStencilStateCreateInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPipelineDepthStencilStateCreateInfo *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
const void * const &,
|
|
VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateFlags const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::CompareOp const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::StencilOpState const &,
|
|
VULKAN_HPP_NAMESPACE::StencilOpState const &,
|
|
float const &,
|
|
float const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType,
|
|
pNext,
|
|
flags,
|
|
depthTestEnable,
|
|
depthWriteEnable,
|
|
depthCompareOp,
|
|
depthBoundsTestEnable,
|
|
stencilTestEnable,
|
|
front,
|
|
back,
|
|
minDepthBounds,
|
|
maxDepthBounds );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( PipelineDepthStencilStateCreateInfo const & ) const = default;
|
|
#else
|
|
bool operator==( PipelineDepthStencilStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( depthTestEnable == rhs.depthTestEnable ) &&
|
|
( depthWriteEnable == rhs.depthWriteEnable ) && ( depthCompareOp == rhs.depthCompareOp ) &&
|
|
( depthBoundsTestEnable == rhs.depthBoundsTestEnable ) && ( stencilTestEnable == rhs.stencilTestEnable ) && ( front == rhs.front ) &&
|
|
( back == rhs.back ) && ( minDepthBounds == rhs.minDepthBounds ) && ( maxDepthBounds == rhs.maxDepthBounds );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( PipelineDepthStencilStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineDepthStencilStateCreateInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateFlags flags = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable = {};
|
|
VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp = VULKAN_HPP_NAMESPACE::CompareOp::eNever;
|
|
VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable = {};
|
|
VULKAN_HPP_NAMESPACE::StencilOpState front = {};
|
|
VULKAN_HPP_NAMESPACE::StencilOpState back = {};
|
|
float minDepthBounds = {};
|
|
float maxDepthBounds = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePipelineDepthStencilStateCreateInfo>
|
|
{
|
|
using Type = PipelineDepthStencilStateCreateInfo;
|
|
};
|
|
|
|
struct PipelineColorBlendAttachmentState
|
|
{
|
|
using NativeType = VkPipelineColorBlendAttachmentState;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PipelineColorBlendAttachmentState( VULKAN_HPP_NAMESPACE::Bool32 blendEnable_ = {},
|
|
VULKAN_HPP_NAMESPACE::BlendFactor srcColorBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero,
|
|
VULKAN_HPP_NAMESPACE::BlendFactor dstColorBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero,
|
|
VULKAN_HPP_NAMESPACE::BlendOp colorBlendOp_ = VULKAN_HPP_NAMESPACE::BlendOp::eAdd,
|
|
VULKAN_HPP_NAMESPACE::BlendFactor srcAlphaBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero,
|
|
VULKAN_HPP_NAMESPACE::BlendFactor dstAlphaBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero,
|
|
VULKAN_HPP_NAMESPACE::BlendOp alphaBlendOp_ = VULKAN_HPP_NAMESPACE::BlendOp::eAdd,
|
|
VULKAN_HPP_NAMESPACE::ColorComponentFlags colorWriteMask_ = {} ) VULKAN_HPP_NOEXCEPT
|
|
: blendEnable( blendEnable_ )
|
|
, srcColorBlendFactor( srcColorBlendFactor_ )
|
|
, dstColorBlendFactor( dstColorBlendFactor_ )
|
|
, colorBlendOp( colorBlendOp_ )
|
|
, srcAlphaBlendFactor( srcAlphaBlendFactor_ )
|
|
, dstAlphaBlendFactor( dstAlphaBlendFactor_ )
|
|
, alphaBlendOp( alphaBlendOp_ )
|
|
, colorWriteMask( colorWriteMask_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR PipelineColorBlendAttachmentState( PipelineColorBlendAttachmentState const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PipelineColorBlendAttachmentState( VkPipelineColorBlendAttachmentState const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PipelineColorBlendAttachmentState( *reinterpret_cast<PipelineColorBlendAttachmentState const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
PipelineColorBlendAttachmentState & operator=( PipelineColorBlendAttachmentState const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PipelineColorBlendAttachmentState & operator=( VkPipelineColorBlendAttachmentState const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState & setBlendEnable( VULKAN_HPP_NAMESPACE::Bool32 blendEnable_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
blendEnable = blendEnable_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState &
|
|
setSrcColorBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor srcColorBlendFactor_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
srcColorBlendFactor = srcColorBlendFactor_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState &
|
|
setDstColorBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor dstColorBlendFactor_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dstColorBlendFactor = dstColorBlendFactor_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState & setColorBlendOp( VULKAN_HPP_NAMESPACE::BlendOp colorBlendOp_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
colorBlendOp = colorBlendOp_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState &
|
|
setSrcAlphaBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor srcAlphaBlendFactor_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
srcAlphaBlendFactor = srcAlphaBlendFactor_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState &
|
|
setDstAlphaBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor dstAlphaBlendFactor_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dstAlphaBlendFactor = dstAlphaBlendFactor_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState & setAlphaBlendOp( VULKAN_HPP_NAMESPACE::BlendOp alphaBlendOp_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
alphaBlendOp = alphaBlendOp_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState &
|
|
setColorWriteMask( VULKAN_HPP_NAMESPACE::ColorComponentFlags colorWriteMask_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
colorWriteMask = colorWriteMask_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkPipelineColorBlendAttachmentState const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPipelineColorBlendAttachmentState *>( this );
|
|
}
|
|
|
|
operator VkPipelineColorBlendAttachmentState &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPipelineColorBlendAttachmentState *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::BlendFactor const &,
|
|
VULKAN_HPP_NAMESPACE::BlendFactor const &,
|
|
VULKAN_HPP_NAMESPACE::BlendOp const &,
|
|
VULKAN_HPP_NAMESPACE::BlendFactor const &,
|
|
VULKAN_HPP_NAMESPACE::BlendFactor const &,
|
|
VULKAN_HPP_NAMESPACE::BlendOp const &,
|
|
VULKAN_HPP_NAMESPACE::ColorComponentFlags const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie(
|
|
blendEnable, srcColorBlendFactor, dstColorBlendFactor, colorBlendOp, srcAlphaBlendFactor, dstAlphaBlendFactor, alphaBlendOp, colorWriteMask );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( PipelineColorBlendAttachmentState const & ) const = default;
|
|
#else
|
|
bool operator==( PipelineColorBlendAttachmentState const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( blendEnable == rhs.blendEnable ) && ( srcColorBlendFactor == rhs.srcColorBlendFactor ) && ( dstColorBlendFactor == rhs.dstColorBlendFactor ) &&
|
|
( colorBlendOp == rhs.colorBlendOp ) && ( srcAlphaBlendFactor == rhs.srcAlphaBlendFactor ) && ( dstAlphaBlendFactor == rhs.dstAlphaBlendFactor ) &&
|
|
( alphaBlendOp == rhs.alphaBlendOp ) && ( colorWriteMask == rhs.colorWriteMask );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( PipelineColorBlendAttachmentState const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::Bool32 blendEnable = {};
|
|
VULKAN_HPP_NAMESPACE::BlendFactor srcColorBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero;
|
|
VULKAN_HPP_NAMESPACE::BlendFactor dstColorBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero;
|
|
VULKAN_HPP_NAMESPACE::BlendOp colorBlendOp = VULKAN_HPP_NAMESPACE::BlendOp::eAdd;
|
|
VULKAN_HPP_NAMESPACE::BlendFactor srcAlphaBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero;
|
|
VULKAN_HPP_NAMESPACE::BlendFactor dstAlphaBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero;
|
|
VULKAN_HPP_NAMESPACE::BlendOp alphaBlendOp = VULKAN_HPP_NAMESPACE::BlendOp::eAdd;
|
|
VULKAN_HPP_NAMESPACE::ColorComponentFlags colorWriteMask = {};
|
|
};
|
|
|
|
struct PipelineColorBlendStateCreateInfo
|
|
{
|
|
using NativeType = VkPipelineColorBlendStateCreateInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineColorBlendStateCreateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags flags_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable_ = {},
|
|
VULKAN_HPP_NAMESPACE::LogicOp logicOp_ = VULKAN_HPP_NAMESPACE::LogicOp::eClear,
|
|
uint32_t attachmentCount_ = {},
|
|
const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState * pAttachments_ = {},
|
|
std::array<float, 4> const & blendConstants_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, flags( flags_ )
|
|
, logicOpEnable( logicOpEnable_ )
|
|
, logicOp( logicOp_ )
|
|
, attachmentCount( attachmentCount_ )
|
|
, pAttachments( pAttachments_ )
|
|
, blendConstants( blendConstants_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo( PipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PipelineColorBlendStateCreateInfo( VkPipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PipelineColorBlendStateCreateInfo( *reinterpret_cast<PipelineColorBlendStateCreateInfo const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
PipelineColorBlendStateCreateInfo(
|
|
VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags flags_,
|
|
VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable_,
|
|
VULKAN_HPP_NAMESPACE::LogicOp logicOp_,
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState> const & attachments_,
|
|
std::array<float, 4> const & blendConstants_ = {},
|
|
const void * pNext_ = nullptr )
|
|
: pNext( pNext_ )
|
|
, flags( flags_ )
|
|
, logicOpEnable( logicOpEnable_ )
|
|
, logicOp( logicOp_ )
|
|
, attachmentCount( static_cast<uint32_t>( attachments_.size() ) )
|
|
, pAttachments( attachments_.data() )
|
|
, blendConstants( blendConstants_ )
|
|
{
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
PipelineColorBlendStateCreateInfo & operator=( PipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PipelineColorBlendStateCreateInfo & operator=( VkPipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo & setLogicOpEnable( VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
logicOpEnable = logicOpEnable_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo & setLogicOp( VULKAN_HPP_NAMESPACE::LogicOp logicOp_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
logicOp = logicOp_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
attachmentCount = attachmentCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo &
|
|
setPAttachments( const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState * pAttachments_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pAttachments = pAttachments_;
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
PipelineColorBlendStateCreateInfo & setAttachments(
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState> const & attachments_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
attachmentCount = static_cast<uint32_t>( attachments_.size() );
|
|
pAttachments = attachments_.data();
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo & setBlendConstants( std::array<float, 4> blendConstants_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
blendConstants = blendConstants_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkPipelineColorBlendStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPipelineColorBlendStateCreateInfo *>( this );
|
|
}
|
|
|
|
operator VkPipelineColorBlendStateCreateInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPipelineColorBlendStateCreateInfo *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
const void * const &,
|
|
VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::LogicOp const &,
|
|
uint32_t const &,
|
|
const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState * const &,
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 4> const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags, logicOpEnable, logicOp, attachmentCount, pAttachments, blendConstants );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( PipelineColorBlendStateCreateInfo const & ) const = default;
|
|
#else
|
|
bool operator==( PipelineColorBlendStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( logicOpEnable == rhs.logicOpEnable ) &&
|
|
( logicOp == rhs.logicOp ) && ( attachmentCount == rhs.attachmentCount ) && ( pAttachments == rhs.pAttachments ) &&
|
|
( blendConstants == rhs.blendConstants );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( PipelineColorBlendStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineColorBlendStateCreateInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags flags = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable = {};
|
|
VULKAN_HPP_NAMESPACE::LogicOp logicOp = VULKAN_HPP_NAMESPACE::LogicOp::eClear;
|
|
uint32_t attachmentCount = {};
|
|
const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState * pAttachments = {};
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 4> blendConstants = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePipelineColorBlendStateCreateInfo>
|
|
{
|
|
using Type = PipelineColorBlendStateCreateInfo;
|
|
};
|
|
|
|
struct PipelineDynamicStateCreateInfo
|
|
{
|
|
using NativeType = VkPipelineDynamicStateCreateInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineDynamicStateCreateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PipelineDynamicStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags flags_ = {},
|
|
uint32_t dynamicStateCount_ = {},
|
|
const VULKAN_HPP_NAMESPACE::DynamicState * pDynamicStates_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, flags( flags_ )
|
|
, dynamicStateCount( dynamicStateCount_ )
|
|
, pDynamicStates( pDynamicStates_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR PipelineDynamicStateCreateInfo( PipelineDynamicStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PipelineDynamicStateCreateInfo( VkPipelineDynamicStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PipelineDynamicStateCreateInfo( *reinterpret_cast<PipelineDynamicStateCreateInfo const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
PipelineDynamicStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags flags_,
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DynamicState> const & dynamicStates_,
|
|
const void * pNext_ = nullptr )
|
|
: pNext( pNext_ ), flags( flags_ ), dynamicStateCount( static_cast<uint32_t>( dynamicStates_.size() ) ), pDynamicStates( dynamicStates_.data() )
|
|
{
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
PipelineDynamicStateCreateInfo & operator=( PipelineDynamicStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PipelineDynamicStateCreateInfo & operator=( VkPipelineDynamicStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineDynamicStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineDynamicStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineDynamicStateCreateInfo & setDynamicStateCount( uint32_t dynamicStateCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dynamicStateCount = dynamicStateCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineDynamicStateCreateInfo & setPDynamicStates( const VULKAN_HPP_NAMESPACE::DynamicState * pDynamicStates_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pDynamicStates = pDynamicStates_;
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
PipelineDynamicStateCreateInfo &
|
|
setDynamicStates( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DynamicState> const & dynamicStates_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dynamicStateCount = static_cast<uint32_t>( dynamicStates_.size() );
|
|
pDynamicStates = dynamicStates_.data();
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkPipelineDynamicStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPipelineDynamicStateCreateInfo *>( this );
|
|
}
|
|
|
|
operator VkPipelineDynamicStateCreateInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPipelineDynamicStateCreateInfo *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
const void * const &,
|
|
VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags const &,
|
|
uint32_t const &,
|
|
const VULKAN_HPP_NAMESPACE::DynamicState * const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags, dynamicStateCount, pDynamicStates );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( PipelineDynamicStateCreateInfo const & ) const = default;
|
|
#else
|
|
bool operator==( PipelineDynamicStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( dynamicStateCount == rhs.dynamicStateCount ) &&
|
|
( pDynamicStates == rhs.pDynamicStates );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( PipelineDynamicStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineDynamicStateCreateInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags flags = {};
|
|
uint32_t dynamicStateCount = {};
|
|
const VULKAN_HPP_NAMESPACE::DynamicState * pDynamicStates = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePipelineDynamicStateCreateInfo>
|
|
{
|
|
using Type = PipelineDynamicStateCreateInfo;
|
|
};
|
|
|
|
struct GraphicsPipelineCreateInfo
|
|
{
|
|
using NativeType = VkGraphicsPipelineCreateInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGraphicsPipelineCreateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {},
|
|
uint32_t stageCount_ = {},
|
|
const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ = {},
|
|
const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState_ = {},
|
|
const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo * pInputAssemblyState_ = {},
|
|
const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState_ = {},
|
|
const VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo * pViewportState_ = {},
|
|
const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo * pRasterizationState_ = {},
|
|
const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo * pMultisampleState_ = {},
|
|
const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo * pDepthStencilState_ = {},
|
|
const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo * pColorBlendState_ = {},
|
|
const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState_ = {},
|
|
VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {},
|
|
VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {},
|
|
uint32_t subpass_ = {},
|
|
VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {},
|
|
int32_t basePipelineIndex_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, flags( flags_ )
|
|
, stageCount( stageCount_ )
|
|
, pStages( pStages_ )
|
|
, pVertexInputState( pVertexInputState_ )
|
|
, pInputAssemblyState( pInputAssemblyState_ )
|
|
, pTessellationState( pTessellationState_ )
|
|
, pViewportState( pViewportState_ )
|
|
, pRasterizationState( pRasterizationState_ )
|
|
, pMultisampleState( pMultisampleState_ )
|
|
, pDepthStencilState( pDepthStencilState_ )
|
|
, pColorBlendState( pColorBlendState_ )
|
|
, pDynamicState( pDynamicState_ )
|
|
, layout( layout_ )
|
|
, renderPass( renderPass_ )
|
|
, subpass( subpass_ )
|
|
, basePipelineHandle( basePipelineHandle_ )
|
|
, basePipelineIndex( basePipelineIndex_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo( GraphicsPipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
GraphicsPipelineCreateInfo( VkGraphicsPipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: GraphicsPipelineCreateInfo( *reinterpret_cast<GraphicsPipelineCreateInfo const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
GraphicsPipelineCreateInfo( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_,
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo> const & stages_,
|
|
const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState_ = {},
|
|
const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo * pInputAssemblyState_ = {},
|
|
const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState_ = {},
|
|
const VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo * pViewportState_ = {},
|
|
const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo * pRasterizationState_ = {},
|
|
const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo * pMultisampleState_ = {},
|
|
const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo * pDepthStencilState_ = {},
|
|
const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo * pColorBlendState_ = {},
|
|
const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState_ = {},
|
|
VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {},
|
|
VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {},
|
|
uint32_t subpass_ = {},
|
|
VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {},
|
|
int32_t basePipelineIndex_ = {},
|
|
const void * pNext_ = nullptr )
|
|
: pNext( pNext_ )
|
|
, flags( flags_ )
|
|
, stageCount( static_cast<uint32_t>( stages_.size() ) )
|
|
, pStages( stages_.data() )
|
|
, pVertexInputState( pVertexInputState_ )
|
|
, pInputAssemblyState( pInputAssemblyState_ )
|
|
, pTessellationState( pTessellationState_ )
|
|
, pViewportState( pViewportState_ )
|
|
, pRasterizationState( pRasterizationState_ )
|
|
, pMultisampleState( pMultisampleState_ )
|
|
, pDepthStencilState( pDepthStencilState_ )
|
|
, pColorBlendState( pColorBlendState_ )
|
|
, pDynamicState( pDynamicState_ )
|
|
, layout( layout_ )
|
|
, renderPass( renderPass_ )
|
|
, subpass( subpass_ )
|
|
, basePipelineHandle( basePipelineHandle_ )
|
|
, basePipelineIndex( basePipelineIndex_ )
|
|
{
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
GraphicsPipelineCreateInfo & operator=( GraphicsPipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
GraphicsPipelineCreateInfo & operator=( VkGraphicsPipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setStageCount( uint32_t stageCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
stageCount = stageCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setPStages( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pStages = pStages_;
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
GraphicsPipelineCreateInfo &
|
|
setStages( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo> const & stages_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
stageCount = static_cast<uint32_t>( stages_.size() );
|
|
pStages = stages_.data();
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo &
|
|
setPVertexInputState( const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pVertexInputState = pVertexInputState_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo &
|
|
setPInputAssemblyState( const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo * pInputAssemblyState_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pInputAssemblyState = pInputAssemblyState_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo &
|
|
setPTessellationState( const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pTessellationState = pTessellationState_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo &
|
|
setPViewportState( const VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo * pViewportState_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pViewportState = pViewportState_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo &
|
|
setPRasterizationState( const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo * pRasterizationState_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pRasterizationState = pRasterizationState_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo &
|
|
setPMultisampleState( const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo * pMultisampleState_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pMultisampleState = pMultisampleState_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo &
|
|
setPDepthStencilState( const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo * pDepthStencilState_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pDepthStencilState = pDepthStencilState_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo &
|
|
setPColorBlendState( const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo * pColorBlendState_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pColorBlendState = pColorBlendState_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo &
|
|
setPDynamicState( const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pDynamicState = pDynamicState_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
layout = layout_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
renderPass = renderPass_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setSubpass( uint32_t subpass_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
subpass = subpass_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
basePipelineHandle = basePipelineHandle_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setBasePipelineIndex( int32_t basePipelineIndex_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
basePipelineIndex = basePipelineIndex_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkGraphicsPipelineCreateInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( this );
|
|
}
|
|
|
|
operator VkGraphicsPipelineCreateInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkGraphicsPipelineCreateInfo *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
const void * const &,
|
|
VULKAN_HPP_NAMESPACE::PipelineCreateFlags const &,
|
|
uint32_t const &,
|
|
const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * const &,
|
|
const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * const &,
|
|
const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo * const &,
|
|
const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * const &,
|
|
const VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo * const &,
|
|
const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo * const &,
|
|
const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo * const &,
|
|
const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo * const &,
|
|
const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo * const &,
|
|
const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * const &,
|
|
VULKAN_HPP_NAMESPACE::PipelineLayout const &,
|
|
VULKAN_HPP_NAMESPACE::RenderPass const &,
|
|
uint32_t const &,
|
|
VULKAN_HPP_NAMESPACE::Pipeline const &,
|
|
int32_t const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType,
|
|
pNext,
|
|
flags,
|
|
stageCount,
|
|
pStages,
|
|
pVertexInputState,
|
|
pInputAssemblyState,
|
|
pTessellationState,
|
|
pViewportState,
|
|
pRasterizationState,
|
|
pMultisampleState,
|
|
pDepthStencilState,
|
|
pColorBlendState,
|
|
pDynamicState,
|
|
layout,
|
|
renderPass,
|
|
subpass,
|
|
basePipelineHandle,
|
|
basePipelineIndex );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( GraphicsPipelineCreateInfo const & ) const = default;
|
|
#else
|
|
bool operator==( GraphicsPipelineCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( stageCount == rhs.stageCount ) && ( pStages == rhs.pStages ) &&
|
|
( pVertexInputState == rhs.pVertexInputState ) && ( pInputAssemblyState == rhs.pInputAssemblyState ) &&
|
|
( pTessellationState == rhs.pTessellationState ) && ( pViewportState == rhs.pViewportState ) &&
|
|
( pRasterizationState == rhs.pRasterizationState ) && ( pMultisampleState == rhs.pMultisampleState ) &&
|
|
( pDepthStencilState == rhs.pDepthStencilState ) && ( pColorBlendState == rhs.pColorBlendState ) && ( pDynamicState == rhs.pDynamicState ) &&
|
|
( layout == rhs.layout ) && ( renderPass == rhs.renderPass ) && ( subpass == rhs.subpass ) && ( basePipelineHandle == rhs.basePipelineHandle ) &&
|
|
( basePipelineIndex == rhs.basePipelineIndex );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( GraphicsPipelineCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGraphicsPipelineCreateInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags = {};
|
|
uint32_t stageCount = {};
|
|
const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages = {};
|
|
const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState = {};
|
|
const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo * pInputAssemblyState = {};
|
|
const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState = {};
|
|
const VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo * pViewportState = {};
|
|
const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo * pRasterizationState = {};
|
|
const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo * pMultisampleState = {};
|
|
const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo * pDepthStencilState = {};
|
|
const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo * pColorBlendState = {};
|
|
const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState = {};
|
|
VULKAN_HPP_NAMESPACE::PipelineLayout layout = {};
|
|
VULKAN_HPP_NAMESPACE::RenderPass renderPass = {};
|
|
uint32_t subpass = {};
|
|
VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {};
|
|
int32_t basePipelineIndex = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eGraphicsPipelineCreateInfo>
|
|
{
|
|
using Type = GraphicsPipelineCreateInfo;
|
|
};
|
|
|
|
struct XYColorEXT
|
|
{
|
|
using NativeType = VkXYColorEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR XYColorEXT( float x_ = {}, float y_ = {} ) VULKAN_HPP_NOEXCEPT
|
|
: x( x_ )
|
|
, y( y_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR XYColorEXT( XYColorEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
XYColorEXT( VkXYColorEXT const & rhs ) VULKAN_HPP_NOEXCEPT : XYColorEXT( *reinterpret_cast<XYColorEXT const *>( &rhs ) ) {}
|
|
|
|
XYColorEXT & operator=( XYColorEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
XYColorEXT & operator=( VkXYColorEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::XYColorEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 XYColorEXT & setX( float x_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
x = x_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 XYColorEXT & setY( float y_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
y = y_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkXYColorEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkXYColorEXT *>( this );
|
|
}
|
|
|
|
operator VkXYColorEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkXYColorEXT *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<float const &, float const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( x, y );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( XYColorEXT const & ) const = default;
|
|
#else
|
|
bool operator==( XYColorEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( x == rhs.x ) && ( y == rhs.y );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( XYColorEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
float x = {};
|
|
float y = {};
|
|
};
|
|
|
|
struct HdrMetadataEXT
|
|
{
|
|
using NativeType = VkHdrMetadataEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eHdrMetadataEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR HdrMetadataEXT( VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryRed_ = {},
|
|
VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryGreen_ = {},
|
|
VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryBlue_ = {},
|
|
VULKAN_HPP_NAMESPACE::XYColorEXT whitePoint_ = {},
|
|
float maxLuminance_ = {},
|
|
float minLuminance_ = {},
|
|
float maxContentLightLevel_ = {},
|
|
float maxFrameAverageLightLevel_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, displayPrimaryRed( displayPrimaryRed_ )
|
|
, displayPrimaryGreen( displayPrimaryGreen_ )
|
|
, displayPrimaryBlue( displayPrimaryBlue_ )
|
|
, whitePoint( whitePoint_ )
|
|
, maxLuminance( maxLuminance_ )
|
|
, minLuminance( minLuminance_ )
|
|
, maxContentLightLevel( maxContentLightLevel_ )
|
|
, maxFrameAverageLightLevel( maxFrameAverageLightLevel_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR HdrMetadataEXT( HdrMetadataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
HdrMetadataEXT( VkHdrMetadataEXT const & rhs ) VULKAN_HPP_NOEXCEPT : HdrMetadataEXT( *reinterpret_cast<HdrMetadataEXT const *>( &rhs ) ) {}
|
|
|
|
HdrMetadataEXT & operator=( HdrMetadataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
HdrMetadataEXT & operator=( VkHdrMetadataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::HdrMetadataEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & setDisplayPrimaryRed( VULKAN_HPP_NAMESPACE::XYColorEXT const & displayPrimaryRed_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
displayPrimaryRed = displayPrimaryRed_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & setDisplayPrimaryGreen( VULKAN_HPP_NAMESPACE::XYColorEXT const & displayPrimaryGreen_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
displayPrimaryGreen = displayPrimaryGreen_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & setDisplayPrimaryBlue( VULKAN_HPP_NAMESPACE::XYColorEXT const & displayPrimaryBlue_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
displayPrimaryBlue = displayPrimaryBlue_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & setWhitePoint( VULKAN_HPP_NAMESPACE::XYColorEXT const & whitePoint_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
whitePoint = whitePoint_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & setMaxLuminance( float maxLuminance_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
maxLuminance = maxLuminance_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & setMinLuminance( float minLuminance_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
minLuminance = minLuminance_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & setMaxContentLightLevel( float maxContentLightLevel_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
maxContentLightLevel = maxContentLightLevel_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & setMaxFrameAverageLightLevel( float maxFrameAverageLightLevel_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
maxFrameAverageLightLevel = maxFrameAverageLightLevel_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkHdrMetadataEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkHdrMetadataEXT *>( this );
|
|
}
|
|
|
|
operator VkHdrMetadataEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkHdrMetadataEXT *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
const void * const &,
|
|
VULKAN_HPP_NAMESPACE::XYColorEXT const &,
|
|
VULKAN_HPP_NAMESPACE::XYColorEXT const &,
|
|
VULKAN_HPP_NAMESPACE::XYColorEXT const &,
|
|
VULKAN_HPP_NAMESPACE::XYColorEXT const &,
|
|
float const &,
|
|
float const &,
|
|
float const &,
|
|
float const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType,
|
|
pNext,
|
|
displayPrimaryRed,
|
|
displayPrimaryGreen,
|
|
displayPrimaryBlue,
|
|
whitePoint,
|
|
maxLuminance,
|
|
minLuminance,
|
|
maxContentLightLevel,
|
|
maxFrameAverageLightLevel );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( HdrMetadataEXT const & ) const = default;
|
|
#else
|
|
bool operator==( HdrMetadataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( displayPrimaryRed == rhs.displayPrimaryRed ) &&
|
|
( displayPrimaryGreen == rhs.displayPrimaryGreen ) && ( displayPrimaryBlue == rhs.displayPrimaryBlue ) && ( whitePoint == rhs.whitePoint ) &&
|
|
( maxLuminance == rhs.maxLuminance ) && ( minLuminance == rhs.minLuminance ) && ( maxContentLightLevel == rhs.maxContentLightLevel ) &&
|
|
( maxFrameAverageLightLevel == rhs.maxFrameAverageLightLevel );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( HdrMetadataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eHdrMetadataEXT;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryRed = {};
|
|
VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryGreen = {};
|
|
VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryBlue = {};
|
|
VULKAN_HPP_NAMESPACE::XYColorEXT whitePoint = {};
|
|
float maxLuminance = {};
|
|
float minLuminance = {};
|
|
float maxContentLightLevel = {};
|
|
float maxFrameAverageLightLevel = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eHdrMetadataEXT>
|
|
{
|
|
using Type = HdrMetadataEXT;
|
|
};
|
|
|
|
struct HeadlessSurfaceCreateInfoEXT
|
|
{
|
|
using NativeType = VkHeadlessSurfaceCreateInfoEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eHeadlessSurfaceCreateInfoEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR HeadlessSurfaceCreateInfoEXT( VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateFlagsEXT flags_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, flags( flags_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR HeadlessSurfaceCreateInfoEXT( HeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
HeadlessSurfaceCreateInfoEXT( VkHeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: HeadlessSurfaceCreateInfoEXT( *reinterpret_cast<HeadlessSurfaceCreateInfoEXT const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
HeadlessSurfaceCreateInfoEXT & operator=( HeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
HeadlessSurfaceCreateInfoEXT & operator=( VkHeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 HeadlessSurfaceCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 HeadlessSurfaceCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkHeadlessSurfaceCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT *>( this );
|
|
}
|
|
|
|
operator VkHeadlessSurfaceCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkHeadlessSurfaceCreateInfoEXT *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateFlagsEXT const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( HeadlessSurfaceCreateInfoEXT const & ) const = default;
|
|
#else
|
|
bool operator==( HeadlessSurfaceCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( HeadlessSurfaceCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eHeadlessSurfaceCreateInfoEXT;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateFlagsEXT flags = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eHeadlessSurfaceCreateInfoEXT>
|
|
{
|
|
using Type = HeadlessSurfaceCreateInfoEXT;
|
|
};
|
|
|
|

  struct ImageBlit
  {
    using NativeType = VkImageBlit;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 ImageBlit( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {},
                                       std::array<VULKAN_HPP_NAMESPACE::Offset3D, 2> const & srcOffsets_ = {},
                                       VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {},
                                       std::array<VULKAN_HPP_NAMESPACE::Offset3D, 2> const & dstOffsets_ = {} ) VULKAN_HPP_NOEXCEPT
      : srcSubresource( srcSubresource_ )
      , srcOffsets( srcOffsets_ )
      , dstSubresource( dstSubresource_ )
      , dstOffsets( dstOffsets_ )
    {
    }

    VULKAN_HPP_CONSTEXPR_14 ImageBlit( ImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ImageBlit( VkImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT : ImageBlit( *reinterpret_cast<ImageBlit const *>( &rhs ) ) {}

    ImageBlit & operator=( ImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    ImageBlit & operator=( VkImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageBlit const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 ImageBlit & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT
    {
      srcSubresource = srcSubresource_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageBlit & setSrcOffsets( std::array<VULKAN_HPP_NAMESPACE::Offset3D, 2> const & srcOffsets_ ) VULKAN_HPP_NOEXCEPT
    {
      srcOffsets = srcOffsets_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageBlit & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT
    {
      dstSubresource = dstSubresource_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageBlit & setDstOffsets( std::array<VULKAN_HPP_NAMESPACE::Offset3D, 2> const & dstOffsets_ ) VULKAN_HPP_NOEXCEPT
    {
      dstOffsets = dstOffsets_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkImageBlit const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImageBlit *>( this );
    }

    operator VkImageBlit &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImageBlit *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const &,
               VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::Offset3D, 2> const &,
               VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const &,
               VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::Offset3D, 2> const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( srcSubresource, srcOffsets, dstSubresource, dstOffsets );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ImageBlit const & ) const = default;
#else
    bool operator==( ImageBlit const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
# if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
# else
      return ( srcSubresource == rhs.srcSubresource ) && ( srcOffsets == rhs.srcOffsets ) && ( dstSubresource == rhs.dstSubresource ) &&
             ( dstOffsets == rhs.dstOffsets );
# endif
    }

    bool operator!=( ImageBlit const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {};
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::Offset3D, 2> srcOffsets = {};
    VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {};
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::Offset3D, 2> dstOffsets = {};
  };
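
  // Usage sketch (illustrative only): filling a vk::ImageBlit and recording a scaled copy.
  // `cmd`, `srcImage`, `dstImage` and the int32_t extents are assumptions, with both images already
  // in the transfer layouts named below.
  //
  //   vk::ImageBlit blit{};
  //   blit.setSrcSubresource( { vk::ImageAspectFlagBits::eColor, 0, 0, 1 } )
  //       .setSrcOffsets( { vk::Offset3D{ 0, 0, 0 }, vk::Offset3D{ srcWidth, srcHeight, 1 } } )
  //       .setDstSubresource( { vk::ImageAspectFlagBits::eColor, 0, 0, 1 } )
  //       .setDstOffsets( { vk::Offset3D{ 0, 0, 0 }, vk::Offset3D{ dstWidth, dstHeight, 1 } } );
  //   cmd.blitImage( srcImage, vk::ImageLayout::eTransferSrcOptimal,
  //                  dstImage, vk::ImageLayout::eTransferDstOptimal, blit, vk::Filter::eLinear );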

  struct ImageCopy
  {
    using NativeType = VkImageCopy;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR ImageCopy( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {},
                                    VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ = {},
                                    VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {},
                                    VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {},
                                    VULKAN_HPP_NAMESPACE::Extent3D extent_ = {} ) VULKAN_HPP_NOEXCEPT
      : srcSubresource( srcSubresource_ )
      , srcOffset( srcOffset_ )
      , dstSubresource( dstSubresource_ )
      , dstOffset( dstOffset_ )
      , extent( extent_ )
    {
    }

    VULKAN_HPP_CONSTEXPR ImageCopy( ImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ImageCopy( VkImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT : ImageCopy( *reinterpret_cast<ImageCopy const *>( &rhs ) ) {}

    ImageCopy & operator=( ImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    ImageCopy & operator=( VkImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageCopy const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 ImageCopy & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT
    {
      srcSubresource = srcSubresource_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageCopy & setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D const & srcOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      srcOffset = srcOffset_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageCopy & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT
    {
      dstSubresource = dstSubresource_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageCopy & setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D const & dstOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      dstOffset = dstOffset_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageCopy & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT
    {
      extent = extent_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkImageCopy const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImageCopy *>( this );
    }

    operator VkImageCopy &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImageCopy *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const &,
               VULKAN_HPP_NAMESPACE::Offset3D const &,
               VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const &,
               VULKAN_HPP_NAMESPACE::Offset3D const &,
               VULKAN_HPP_NAMESPACE::Extent3D const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( srcSubresource, srcOffset, dstSubresource, dstOffset, extent );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ImageCopy const & ) const = default;
#else
    bool operator==( ImageCopy const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
# if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
# else
      return ( srcSubresource == rhs.srcSubresource ) && ( srcOffset == rhs.srcOffset ) && ( dstSubresource == rhs.dstSubresource ) &&
             ( dstOffset == rhs.dstOffset ) && ( extent == rhs.extent );
# endif
    }

    bool operator!=( ImageCopy const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {};
    VULKAN_HPP_NAMESPACE::Offset3D srcOffset = {};
    VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {};
    VULKAN_HPP_NAMESPACE::Offset3D dstOffset = {};
    VULKAN_HPP_NAMESPACE::Extent3D extent = {};
  };
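
  // Usage sketch (illustrative only): a 1:1 copy of the base mip level between two images.
  // `cmd`, `srcImage`, `dstImage`, `width` and `height` are assumptions, with both images in the
  // transfer layouts named below.
  //
  //   vk::ImageCopy region{};
  //   region.setSrcSubresource( { vk::ImageAspectFlagBits::eColor, 0, 0, 1 } )
  //         .setDstSubresource( { vk::ImageAspectFlagBits::eColor, 0, 0, 1 } )
  //         .setExtent( { width, height, 1 } );
  //   cmd.copyImage( srcImage, vk::ImageLayout::eTransferSrcOptimal,
  //                  dstImage, vk::ImageLayout::eTransferDstOptimal, region );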

  struct SubresourceLayout
  {
    using NativeType = VkSubresourceLayout;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR SubresourceLayout( VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {},
                                            VULKAN_HPP_NAMESPACE::DeviceSize size_ = {},
                                            VULKAN_HPP_NAMESPACE::DeviceSize rowPitch_ = {},
                                            VULKAN_HPP_NAMESPACE::DeviceSize arrayPitch_ = {},
                                            VULKAN_HPP_NAMESPACE::DeviceSize depthPitch_ = {} ) VULKAN_HPP_NOEXCEPT
      : offset( offset_ )
      , size( size_ )
      , rowPitch( rowPitch_ )
      , arrayPitch( arrayPitch_ )
      , depthPitch( depthPitch_ )
    {
    }

    VULKAN_HPP_CONSTEXPR SubresourceLayout( SubresourceLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SubresourceLayout( VkSubresourceLayout const & rhs ) VULKAN_HPP_NOEXCEPT : SubresourceLayout( *reinterpret_cast<SubresourceLayout const *>( &rhs ) ) {}

    SubresourceLayout & operator=( SubresourceLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    SubresourceLayout & operator=( VkSubresourceLayout const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubresourceLayout const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 SubresourceLayout & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
    {
      offset = offset_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SubresourceLayout & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
    {
      size = size_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SubresourceLayout & setRowPitch( VULKAN_HPP_NAMESPACE::DeviceSize rowPitch_ ) VULKAN_HPP_NOEXCEPT
    {
      rowPitch = rowPitch_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SubresourceLayout & setArrayPitch( VULKAN_HPP_NAMESPACE::DeviceSize arrayPitch_ ) VULKAN_HPP_NOEXCEPT
    {
      arrayPitch = arrayPitch_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SubresourceLayout & setDepthPitch( VULKAN_HPP_NAMESPACE::DeviceSize depthPitch_ ) VULKAN_HPP_NOEXCEPT
    {
      depthPitch = depthPitch_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkSubresourceLayout const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSubresourceLayout *>( this );
    }

    operator VkSubresourceLayout &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSubresourceLayout *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::DeviceSize const &,
               VULKAN_HPP_NAMESPACE::DeviceSize const &,
               VULKAN_HPP_NAMESPACE::DeviceSize const &,
               VULKAN_HPP_NAMESPACE::DeviceSize const &,
               VULKAN_HPP_NAMESPACE::DeviceSize const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( offset, size, rowPitch, arrayPitch, depthPitch );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SubresourceLayout const & ) const = default;
#else
    bool operator==( SubresourceLayout const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
# if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
# else
      return ( offset == rhs.offset ) && ( size == rhs.size ) && ( rowPitch == rhs.rowPitch ) && ( arrayPitch == rhs.arrayPitch ) &&
             ( depthPitch == rhs.depthPitch );
# endif
    }

    bool operator!=( SubresourceLayout const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
    VULKAN_HPP_NAMESPACE::DeviceSize size = {};
    VULKAN_HPP_NAMESPACE::DeviceSize rowPitch = {};
    VULKAN_HPP_NAMESPACE::DeviceSize arrayPitch = {};
    VULKAN_HPP_NAMESPACE::DeviceSize depthPitch = {};
  };
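
  // Usage sketch (illustrative only): querying the layout of a linearly tiled image so it can be
  // mapped and written from the CPU.  `device` and `linearImage` are assumptions.
  //
  //   vk::SubresourceLayout layout =
  //     device.getImageSubresourceLayout( linearImage, vk::ImageSubresource( vk::ImageAspectFlagBits::eColor, 0, 0 ) );
  //   // layout.offset is where the subresource starts in the mapped memory, layout.rowPitch the byte stride per row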
struct ImageDrmFormatModifierExplicitCreateInfoEXT
|
|
{
|
|
using NativeType = VkImageDrmFormatModifierExplicitCreateInfoEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageDrmFormatModifierExplicitCreateInfoEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierExplicitCreateInfoEXT( uint64_t drmFormatModifier_ = {},
|
|
uint32_t drmFormatModifierPlaneCount_ = {},
|
|
const VULKAN_HPP_NAMESPACE::SubresourceLayout * pPlaneLayouts_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, drmFormatModifier( drmFormatModifier_ )
|
|
, drmFormatModifierPlaneCount( drmFormatModifierPlaneCount_ )
|
|
, pPlaneLayouts( pPlaneLayouts_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierExplicitCreateInfoEXT( ImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImageDrmFormatModifierExplicitCreateInfoEXT( VkImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ImageDrmFormatModifierExplicitCreateInfoEXT( *reinterpret_cast<ImageDrmFormatModifierExplicitCreateInfoEXT const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
ImageDrmFormatModifierExplicitCreateInfoEXT(
|
|
uint64_t drmFormatModifier_,
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubresourceLayout> const & planeLayouts_,
|
|
const void * pNext_ = nullptr )
|
|
: pNext( pNext_ )
|
|
, drmFormatModifier( drmFormatModifier_ )
|
|
, drmFormatModifierPlaneCount( static_cast<uint32_t>( planeLayouts_.size() ) )
|
|
, pPlaneLayouts( planeLayouts_.data() )
|
|
{
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
ImageDrmFormatModifierExplicitCreateInfoEXT & operator=( ImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ImageDrmFormatModifierExplicitCreateInfoEXT & operator=( VkImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierExplicitCreateInfoEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierExplicitCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierExplicitCreateInfoEXT & setDrmFormatModifier( uint64_t drmFormatModifier_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
drmFormatModifier = drmFormatModifier_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierExplicitCreateInfoEXT &
|
|
setDrmFormatModifierPlaneCount( uint32_t drmFormatModifierPlaneCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
drmFormatModifierPlaneCount = drmFormatModifierPlaneCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierExplicitCreateInfoEXT &
|
|
setPPlaneLayouts( const VULKAN_HPP_NAMESPACE::SubresourceLayout * pPlaneLayouts_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pPlaneLayouts = pPlaneLayouts_;
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
ImageDrmFormatModifierExplicitCreateInfoEXT &
|
|
setPlaneLayouts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubresourceLayout> const & planeLayouts_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
drmFormatModifierPlaneCount = static_cast<uint32_t>( planeLayouts_.size() );
|
|
pPlaneLayouts = planeLayouts_.data();
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkImageDrmFormatModifierExplicitCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkImageDrmFormatModifierExplicitCreateInfoEXT *>( this );
|
|
}
|
|
|
|
operator VkImageDrmFormatModifierExplicitCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkImageDrmFormatModifierExplicitCreateInfoEXT *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
const void * const &,
|
|
uint64_t const &,
|
|
uint32_t const &,
|
|
const VULKAN_HPP_NAMESPACE::SubresourceLayout * const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, drmFormatModifier, drmFormatModifierPlaneCount, pPlaneLayouts );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( ImageDrmFormatModifierExplicitCreateInfoEXT const & ) const = default;
|
|
#else
|
|
bool operator==( ImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( drmFormatModifier == rhs.drmFormatModifier ) &&
|
|
( drmFormatModifierPlaneCount == rhs.drmFormatModifierPlaneCount ) && ( pPlaneLayouts == rhs.pPlaneLayouts );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( ImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageDrmFormatModifierExplicitCreateInfoEXT;
|
|
const void * pNext = {};
|
|
uint64_t drmFormatModifier = {};
|
|
uint32_t drmFormatModifierPlaneCount = {};
|
|
const VULKAN_HPP_NAMESPACE::SubresourceLayout * pPlaneLayouts = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eImageDrmFormatModifierExplicitCreateInfoEXT>
|
|
{
|
|
using Type = ImageDrmFormatModifierExplicitCreateInfoEXT;
|
|
};
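
  // Usage sketch (illustrative only): importing an image with an explicit DRM format modifier.
  // `drmFormatModifier` and `planeLayout` are assumed to come from the exporting API; requires the
  // VK_EXT_image_drm_format_modifier extension.
  //
  //   vk::ImageDrmFormatModifierExplicitCreateInfoEXT modifierInfo( drmFormatModifier, 1, &planeLayout );
  //   vk::ImageCreateInfo imageCreateInfo{};
  //   imageCreateInfo.setTiling( vk::ImageTiling::eDrmFormatModifierEXT ).setPNext( &modifierInfo );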
|
|
|
|
struct ImageDrmFormatModifierListCreateInfoEXT
|
|
{
|
|
using NativeType = VkImageDrmFormatModifierListCreateInfoEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageDrmFormatModifierListCreateInfoEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierListCreateInfoEXT( uint32_t drmFormatModifierCount_ = {},
|
|
const uint64_t * pDrmFormatModifiers_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, drmFormatModifierCount( drmFormatModifierCount_ )
|
|
, pDrmFormatModifiers( pDrmFormatModifiers_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierListCreateInfoEXT( ImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImageDrmFormatModifierListCreateInfoEXT( VkImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ImageDrmFormatModifierListCreateInfoEXT( *reinterpret_cast<ImageDrmFormatModifierListCreateInfoEXT const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
ImageDrmFormatModifierListCreateInfoEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & drmFormatModifiers_,
|
|
const void * pNext_ = nullptr )
|
|
: pNext( pNext_ ), drmFormatModifierCount( static_cast<uint32_t>( drmFormatModifiers_.size() ) ), pDrmFormatModifiers( drmFormatModifiers_.data() )
|
|
{
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
ImageDrmFormatModifierListCreateInfoEXT & operator=( ImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ImageDrmFormatModifierListCreateInfoEXT & operator=( VkImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierListCreateInfoEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierListCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierListCreateInfoEXT & setDrmFormatModifierCount( uint32_t drmFormatModifierCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
drmFormatModifierCount = drmFormatModifierCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierListCreateInfoEXT & setPDrmFormatModifiers( const uint64_t * pDrmFormatModifiers_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pDrmFormatModifiers = pDrmFormatModifiers_;
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
ImageDrmFormatModifierListCreateInfoEXT &
|
|
setDrmFormatModifiers( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & drmFormatModifiers_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
drmFormatModifierCount = static_cast<uint32_t>( drmFormatModifiers_.size() );
|
|
pDrmFormatModifiers = drmFormatModifiers_.data();
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkImageDrmFormatModifierListCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkImageDrmFormatModifierListCreateInfoEXT *>( this );
|
|
}
|
|
|
|
operator VkImageDrmFormatModifierListCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkImageDrmFormatModifierListCreateInfoEXT *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const uint64_t * const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, drmFormatModifierCount, pDrmFormatModifiers );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( ImageDrmFormatModifierListCreateInfoEXT const & ) const = default;
|
|
#else
|
|
bool operator==( ImageDrmFormatModifierListCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( drmFormatModifierCount == rhs.drmFormatModifierCount ) &&
|
|
( pDrmFormatModifiers == rhs.pDrmFormatModifiers );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( ImageDrmFormatModifierListCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageDrmFormatModifierListCreateInfoEXT;
|
|
const void * pNext = {};
|
|
uint32_t drmFormatModifierCount = {};
|
|
const uint64_t * pDrmFormatModifiers = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eImageDrmFormatModifierListCreateInfoEXT>
|
|
{
|
|
using Type = ImageDrmFormatModifierListCreateInfoEXT;
|
|
};
|
|
|
|
struct ImageDrmFormatModifierPropertiesEXT
|
|
{
|
|
using NativeType = VkImageDrmFormatModifierPropertiesEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageDrmFormatModifierPropertiesEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierPropertiesEXT( uint64_t drmFormatModifier_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, drmFormatModifier( drmFormatModifier_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierPropertiesEXT( ImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImageDrmFormatModifierPropertiesEXT( VkImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ImageDrmFormatModifierPropertiesEXT( *reinterpret_cast<ImageDrmFormatModifierPropertiesEXT const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
ImageDrmFormatModifierPropertiesEXT & operator=( ImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ImageDrmFormatModifierPropertiesEXT & operator=( VkImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
operator VkImageDrmFormatModifierPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkImageDrmFormatModifierPropertiesEXT *>( this );
|
|
}
|
|
|
|
operator VkImageDrmFormatModifierPropertiesEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkImageDrmFormatModifierPropertiesEXT *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint64_t const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, drmFormatModifier );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( ImageDrmFormatModifierPropertiesEXT const & ) const = default;
|
|
#else
|
|
bool operator==( ImageDrmFormatModifierPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( drmFormatModifier == rhs.drmFormatModifier );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( ImageDrmFormatModifierPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageDrmFormatModifierPropertiesEXT;
|
|
void * pNext = {};
|
|
uint64_t drmFormatModifier = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eImageDrmFormatModifierPropertiesEXT>
|
|
{
|
|
using Type = ImageDrmFormatModifierPropertiesEXT;
|
|
};
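
  // Usage sketch (illustrative only, assuming the enhanced-mode wrapper for
  // vkGetImageDrmFormatModifierPropertiesEXT is available): querying which modifier an image with
  // DRM format modifier tiling actually uses.  `device` and `image` are assumptions.
  //
  //   vk::ImageDrmFormatModifierPropertiesEXT modifierProperties = device.getImageDrmFormatModifierPropertiesEXT( image );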
|
|
|
|
struct ImageFormatListCreateInfo
|
|
{
|
|
using NativeType = VkImageFormatListCreateInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageFormatListCreateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ImageFormatListCreateInfo( uint32_t viewFormatCount_ = {},
|
|
const VULKAN_HPP_NAMESPACE::Format * pViewFormats_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, viewFormatCount( viewFormatCount_ )
|
|
, pViewFormats( pViewFormats_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR ImageFormatListCreateInfo( ImageFormatListCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImageFormatListCreateInfo( VkImageFormatListCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ImageFormatListCreateInfo( *reinterpret_cast<ImageFormatListCreateInfo const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
ImageFormatListCreateInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Format> const & viewFormats_,
|
|
const void * pNext_ = nullptr )
|
|
: pNext( pNext_ ), viewFormatCount( static_cast<uint32_t>( viewFormats_.size() ) ), pViewFormats( viewFormats_.data() )
|
|
{
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
ImageFormatListCreateInfo & operator=( ImageFormatListCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ImageFormatListCreateInfo & operator=( VkImageFormatListCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageFormatListCreateInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ImageFormatListCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageFormatListCreateInfo & setViewFormatCount( uint32_t viewFormatCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
viewFormatCount = viewFormatCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageFormatListCreateInfo & setPViewFormats( const VULKAN_HPP_NAMESPACE::Format * pViewFormats_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pViewFormats = pViewFormats_;
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
ImageFormatListCreateInfo &
|
|
setViewFormats( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Format> const & viewFormats_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
viewFormatCount = static_cast<uint32_t>( viewFormats_.size() );
|
|
pViewFormats = viewFormats_.data();
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkImageFormatListCreateInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkImageFormatListCreateInfo *>( this );
|
|
}
|
|
|
|
operator VkImageFormatListCreateInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkImageFormatListCreateInfo *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::Format * const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, viewFormatCount, pViewFormats );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( ImageFormatListCreateInfo const & ) const = default;
|
|
#else
|
|
bool operator==( ImageFormatListCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( viewFormatCount == rhs.viewFormatCount ) && ( pViewFormats == rhs.pViewFormats );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( ImageFormatListCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageFormatListCreateInfo;
|
|
const void * pNext = {};
|
|
uint32_t viewFormatCount = {};
|
|
const VULKAN_HPP_NAMESPACE::Format * pViewFormats = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eImageFormatListCreateInfo>
|
|
{
|
|
using Type = ImageFormatListCreateInfo;
|
|
};
|
|
|
|
using ImageFormatListCreateInfoKHR = ImageFormatListCreateInfo;
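
  // Usage sketch (illustrative only, assuming the enhanced-mode constructors are available):
  // declaring up front which formats a mutable-format image may be viewed as.  The formats chosen
  // here are just examples.
  //
  //   std::array<vk::Format, 2> viewFormats = { vk::Format::eR8G8B8A8Unorm, vk::Format::eR8G8B8A8Srgb };
  //   vk::ImageFormatListCreateInfo formatListInfo( viewFormats );
  //   vk::ImageCreateInfo imageCreateInfo{};
  //   imageCreateInfo.setFlags( vk::ImageCreateFlagBits::eMutableFormat ).setPNext( &formatListInfo );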
|
|
|
|
struct ImageFormatProperties
|
|
{
|
|
using NativeType = VkImageFormatProperties;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ImageFormatProperties( VULKAN_HPP_NAMESPACE::Extent3D maxExtent_ = {},
|
|
uint32_t maxMipLevels_ = {},
|
|
uint32_t maxArrayLayers_ = {},
|
|
VULKAN_HPP_NAMESPACE::SampleCountFlags sampleCounts_ = {},
|
|
VULKAN_HPP_NAMESPACE::DeviceSize maxResourceSize_ = {} ) VULKAN_HPP_NOEXCEPT
|
|
: maxExtent( maxExtent_ )
|
|
, maxMipLevels( maxMipLevels_ )
|
|
, maxArrayLayers( maxArrayLayers_ )
|
|
, sampleCounts( sampleCounts_ )
|
|
, maxResourceSize( maxResourceSize_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR ImageFormatProperties( ImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImageFormatProperties( VkImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ImageFormatProperties( *reinterpret_cast<ImageFormatProperties const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
ImageFormatProperties & operator=( ImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ImageFormatProperties & operator=( VkImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageFormatProperties const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
operator VkImageFormatProperties const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkImageFormatProperties *>( this );
|
|
}
|
|
|
|
operator VkImageFormatProperties &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkImageFormatProperties *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::Extent3D const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
VULKAN_HPP_NAMESPACE::SampleCountFlags const &,
|
|
VULKAN_HPP_NAMESPACE::DeviceSize const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( maxExtent, maxMipLevels, maxArrayLayers, sampleCounts, maxResourceSize );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( ImageFormatProperties const & ) const = default;
|
|
#else
|
|
bool operator==( ImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( maxExtent == rhs.maxExtent ) && ( maxMipLevels == rhs.maxMipLevels ) && ( maxArrayLayers == rhs.maxArrayLayers ) &&
|
|
( sampleCounts == rhs.sampleCounts ) && ( maxResourceSize == rhs.maxResourceSize );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( ImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::Extent3D maxExtent = {};
|
|
uint32_t maxMipLevels = {};
|
|
uint32_t maxArrayLayers = {};
|
|
VULKAN_HPP_NAMESPACE::SampleCountFlags sampleCounts = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize maxResourceSize = {};
|
|
};
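
  // Usage sketch (illustrative only): checking the limits for a particular image configuration.
  // The enhanced-mode call throws (or reports an error in the no-exceptions configuration) if the
  // combination is not supported at all.  `physicalDevice` is an assumption.
  //
  //   vk::ImageFormatProperties formatProperties = physicalDevice.getImageFormatProperties(
  //     vk::Format::eR8G8B8A8Unorm, vk::ImageType::e2D, vk::ImageTiling::eOptimal,
  //     vk::ImageUsageFlagBits::eSampled, {} );
  //   // formatProperties.maxExtent / maxMipLevels / sampleCounts bound what vkCreateImage will accept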
|
|
|
|
struct ImageFormatProperties2
|
|
{
|
|
using NativeType = VkImageFormatProperties2;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageFormatProperties2;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ImageFormatProperties2( VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties_ = {},
|
|
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, imageFormatProperties( imageFormatProperties_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR ImageFormatProperties2( ImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImageFormatProperties2( VkImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ImageFormatProperties2( *reinterpret_cast<ImageFormatProperties2 const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
ImageFormatProperties2 & operator=( ImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ImageFormatProperties2 & operator=( VkImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageFormatProperties2 const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
operator VkImageFormatProperties2 const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkImageFormatProperties2 *>( this );
|
|
}
|
|
|
|
operator VkImageFormatProperties2 &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkImageFormatProperties2 *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ImageFormatProperties const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, imageFormatProperties );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( ImageFormatProperties2 const & ) const = default;
|
|
#else
|
|
bool operator==( ImageFormatProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageFormatProperties == rhs.imageFormatProperties );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( ImageFormatProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageFormatProperties2;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eImageFormatProperties2>
|
|
{
|
|
using Type = ImageFormatProperties2;
|
|
};
|
|
|
|
using ImageFormatProperties2KHR = ImageFormatProperties2;
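
  // Usage sketch (illustrative only): the extensible variant of the image-format query; additional
  // property structures can be chained behind vk::ImageFormatProperties2.  `physicalDevice` is an
  // assumption.
  //
  //   vk::PhysicalDeviceImageFormatInfo2 formatInfo( vk::Format::eR8G8B8A8Unorm, vk::ImageType::e2D,
  //                                                  vk::ImageTiling::eOptimal, vk::ImageUsageFlagBits::eSampled );
  //   vk::ImageFormatProperties2 formatProperties2 = physicalDevice.getImageFormatProperties2( formatInfo );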
|
|
|
|
struct ImageMemoryBarrier
|
|
{
|
|
using NativeType = VkImageMemoryBarrier;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageMemoryBarrier;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ImageMemoryBarrier( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {},
|
|
VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {},
|
|
VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
|
|
VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
|
|
uint32_t srcQueueFamilyIndex_ = {},
|
|
uint32_t dstQueueFamilyIndex_ = {},
|
|
VULKAN_HPP_NAMESPACE::Image image_ = {},
|
|
VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, srcAccessMask( srcAccessMask_ )
|
|
, dstAccessMask( dstAccessMask_ )
|
|
, oldLayout( oldLayout_ )
|
|
, newLayout( newLayout_ )
|
|
, srcQueueFamilyIndex( srcQueueFamilyIndex_ )
|
|
, dstQueueFamilyIndex( dstQueueFamilyIndex_ )
|
|
, image( image_ )
|
|
, subresourceRange( subresourceRange_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR ImageMemoryBarrier( ImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImageMemoryBarrier( VkImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT : ImageMemoryBarrier( *reinterpret_cast<ImageMemoryBarrier const *>( &rhs ) ) {}
|
|
|
|
ImageMemoryBarrier & operator=( ImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ImageMemoryBarrier & operator=( VkImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageMemoryBarrier const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
srcAccessMask = srcAccessMask_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dstAccessMask = dstAccessMask_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & setOldLayout( VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
oldLayout = oldLayout_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & setNewLayout( VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
newLayout = newLayout_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
srcQueueFamilyIndex = srcQueueFamilyIndex_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dstQueueFamilyIndex = dstQueueFamilyIndex_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
image = image_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier &
|
|
setSubresourceRange( VULKAN_HPP_NAMESPACE::ImageSubresourceRange const & subresourceRange_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
subresourceRange = subresourceRange_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkImageMemoryBarrier const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkImageMemoryBarrier *>( this );
|
|
}
|
|
|
|
operator VkImageMemoryBarrier &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkImageMemoryBarrier *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
const void * const &,
|
|
VULKAN_HPP_NAMESPACE::AccessFlags const &,
|
|
VULKAN_HPP_NAMESPACE::AccessFlags const &,
|
|
VULKAN_HPP_NAMESPACE::ImageLayout const &,
|
|
VULKAN_HPP_NAMESPACE::ImageLayout const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
VULKAN_HPP_NAMESPACE::Image const &,
|
|
VULKAN_HPP_NAMESPACE::ImageSubresourceRange const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, srcAccessMask, dstAccessMask, oldLayout, newLayout, srcQueueFamilyIndex, dstQueueFamilyIndex, image, subresourceRange );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( ImageMemoryBarrier const & ) const = default;
|
|
#else
|
|
bool operator==( ImageMemoryBarrier const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcAccessMask == rhs.srcAccessMask ) && ( dstAccessMask == rhs.dstAccessMask ) &&
|
|
( oldLayout == rhs.oldLayout ) && ( newLayout == rhs.newLayout ) && ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex ) &&
|
|
( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex ) && ( image == rhs.image ) && ( subresourceRange == rhs.subresourceRange );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( ImageMemoryBarrier const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageMemoryBarrier;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {};
|
|
VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {};
|
|
VULKAN_HPP_NAMESPACE::ImageLayout oldLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
|
|
VULKAN_HPP_NAMESPACE::ImageLayout newLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
|
|
uint32_t srcQueueFamilyIndex = {};
|
|
uint32_t dstQueueFamilyIndex = {};
|
|
VULKAN_HPP_NAMESPACE::Image image = {};
|
|
VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eImageMemoryBarrier>
|
|
{
|
|
using Type = ImageMemoryBarrier;
|
|
};
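
  // Usage sketch (illustrative only): transitioning a freshly created image into a layout suitable
  // for transfer writes.  `cmd` and `image` are assumptions; the stages and access masks depend on
  // the surrounding workload.
  //
  //   vk::ImageMemoryBarrier barrier{};
  //   barrier.setDstAccessMask( vk::AccessFlagBits::eTransferWrite )
  //          .setOldLayout( vk::ImageLayout::eUndefined )
  //          .setNewLayout( vk::ImageLayout::eTransferDstOptimal )
  //          .setSrcQueueFamilyIndex( VK_QUEUE_FAMILY_IGNORED )
  //          .setDstQueueFamilyIndex( VK_QUEUE_FAMILY_IGNORED )
  //          .setImage( image )
  //          .setSubresourceRange( { vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 } );
  //   cmd.pipelineBarrier( vk::PipelineStageFlagBits::eTopOfPipe, vk::PipelineStageFlagBits::eTransfer, {}, {}, {}, barrier );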
|
|
|
|
struct ImageMemoryRequirementsInfo2
|
|
{
|
|
using NativeType = VkImageMemoryRequirementsInfo2;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageMemoryRequirementsInfo2;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ImageMemoryRequirementsInfo2( VULKAN_HPP_NAMESPACE::Image image_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, image( image_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR ImageMemoryRequirementsInfo2( ImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImageMemoryRequirementsInfo2( VkImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ImageMemoryRequirementsInfo2( *reinterpret_cast<ImageMemoryRequirementsInfo2 const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
ImageMemoryRequirementsInfo2 & operator=( ImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ImageMemoryRequirementsInfo2 & operator=( VkImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ImageMemoryRequirementsInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageMemoryRequirementsInfo2 & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
image = image_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkImageMemoryRequirementsInfo2 const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( this );
|
|
}
|
|
|
|
operator VkImageMemoryRequirementsInfo2 &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkImageMemoryRequirementsInfo2 *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Image const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, image );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( ImageMemoryRequirementsInfo2 const & ) const = default;
|
|
#else
|
|
bool operator==( ImageMemoryRequirementsInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( image == rhs.image );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( ImageMemoryRequirementsInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageMemoryRequirementsInfo2;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Image image = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eImageMemoryRequirementsInfo2>
|
|
{
|
|
using Type = ImageMemoryRequirementsInfo2;
|
|
};
|
|
|
|
using ImageMemoryRequirementsInfo2KHR = ImageMemoryRequirementsInfo2;
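
  // Usage sketch (illustrative only): the extensible memory-requirements query; a
  // vk::MemoryDedicatedRequirements can be chained to the result if needed.  `device` and `image`
  // are assumptions.
  //
  //   vk::MemoryRequirements2 requirements = device.getImageMemoryRequirements2( vk::ImageMemoryRequirementsInfo2( image ) );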
|
|
|
|
struct ImagePlaneMemoryRequirementsInfo
|
|
{
|
|
using NativeType = VkImagePlaneMemoryRequirementsInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImagePlaneMemoryRequirementsInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR
|
|
ImagePlaneMemoryRequirementsInfo( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor,
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, planeAspect( planeAspect_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR ImagePlaneMemoryRequirementsInfo( ImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImagePlaneMemoryRequirementsInfo( VkImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ImagePlaneMemoryRequirementsInfo( *reinterpret_cast<ImagePlaneMemoryRequirementsInfo const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
ImagePlaneMemoryRequirementsInfo & operator=( ImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ImagePlaneMemoryRequirementsInfo & operator=( VkImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImagePlaneMemoryRequirementsInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ImagePlaneMemoryRequirementsInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImagePlaneMemoryRequirementsInfo & setPlaneAspect( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
planeAspect = planeAspect_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkImagePlaneMemoryRequirementsInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkImagePlaneMemoryRequirementsInfo *>( this );
|
|
}
|
|
|
|
operator VkImagePlaneMemoryRequirementsInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkImagePlaneMemoryRequirementsInfo *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ImageAspectFlagBits const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, planeAspect );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( ImagePlaneMemoryRequirementsInfo const & ) const = default;
|
|
#else
|
|
bool operator==( ImagePlaneMemoryRequirementsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( planeAspect == rhs.planeAspect );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( ImagePlaneMemoryRequirementsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImagePlaneMemoryRequirementsInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor;
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eImagePlaneMemoryRequirementsInfo>
|
|
{
|
|
using Type = ImagePlaneMemoryRequirementsInfo;
|
|
};
|
|
|
|
using ImagePlaneMemoryRequirementsInfoKHR = ImagePlaneMemoryRequirementsInfo;
|
|
|
|
struct ImageResolve
|
|
{
|
|
using NativeType = VkImageResolve;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ImageResolve( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {},
|
|
VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ = {},
|
|
VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {},
|
|
VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {},
|
|
VULKAN_HPP_NAMESPACE::Extent3D extent_ = {} ) VULKAN_HPP_NOEXCEPT
|
|
: srcSubresource( srcSubresource_ )
|
|
, srcOffset( srcOffset_ )
|
|
, dstSubresource( dstSubresource_ )
|
|
, dstOffset( dstOffset_ )
|
|
, extent( extent_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR ImageResolve( ImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImageResolve( VkImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT : ImageResolve( *reinterpret_cast<ImageResolve const *>( &rhs ) ) {}
|
|
|
|
ImageResolve & operator=( ImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ImageResolve & operator=( VkImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageResolve const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ImageResolve & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
srcSubresource = srcSubresource_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageResolve & setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D const & srcOffset_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
srcOffset = srcOffset_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageResolve & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dstSubresource = dstSubresource_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageResolve & setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D const & dstOffset_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dstOffset = dstOffset_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageResolve & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
extent = extent_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkImageResolve const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkImageResolve *>( this );
|
|
}
|
|
|
|
operator VkImageResolve &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkImageResolve *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const &,
|
|
VULKAN_HPP_NAMESPACE::Offset3D const &,
|
|
VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const &,
|
|
VULKAN_HPP_NAMESPACE::Offset3D const &,
|
|
VULKAN_HPP_NAMESPACE::Extent3D const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( srcSubresource, srcOffset, dstSubresource, dstOffset, extent );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( ImageResolve const & ) const = default;
|
|
#else
|
|
bool operator==( ImageResolve const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( srcSubresource == rhs.srcSubresource ) && ( srcOffset == rhs.srcOffset ) && ( dstSubresource == rhs.dstSubresource ) &&
|
|
( dstOffset == rhs.dstOffset ) && ( extent == rhs.extent );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( ImageResolve const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {};
|
|
VULKAN_HPP_NAMESPACE::Offset3D srcOffset = {};
|
|
VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {};
|
|
VULKAN_HPP_NAMESPACE::Offset3D dstOffset = {};
|
|
VULKAN_HPP_NAMESPACE::Extent3D extent = {};
|
|
};
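
  // Usage sketch (illustrative only): resolving a multisampled color image into a single-sample
  // image.  `cmd`, `msaaImage`, `resolvedImage`, `width` and `height` are assumptions, with both
  // images in the transfer layouts named below.
  //
  //   vk::ImageResolve region{};
  //   region.setSrcSubresource( { vk::ImageAspectFlagBits::eColor, 0, 0, 1 } )
  //         .setDstSubresource( { vk::ImageAspectFlagBits::eColor, 0, 0, 1 } )
  //         .setExtent( { width, height, 1 } );
  //   cmd.resolveImage( msaaImage, vk::ImageLayout::eTransferSrcOptimal,
  //                     resolvedImage, vk::ImageLayout::eTransferDstOptimal, region );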
|
|
|
|
struct ImageResolve2
|
|
{
|
|
using NativeType = VkImageResolve2;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageResolve2;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ImageResolve2( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {},
|
|
VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ = {},
|
|
VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {},
|
|
VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {},
|
|
VULKAN_HPP_NAMESPACE::Extent3D extent_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, srcSubresource( srcSubresource_ )
|
|
, srcOffset( srcOffset_ )
|
|
, dstSubresource( dstSubresource_ )
|
|
, dstOffset( dstOffset_ )
|
|
, extent( extent_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR ImageResolve2( ImageResolve2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImageResolve2( VkImageResolve2 const & rhs ) VULKAN_HPP_NOEXCEPT : ImageResolve2( *reinterpret_cast<ImageResolve2 const *>( &rhs ) ) {}
|
|
|
|
ImageResolve2 & operator=( ImageResolve2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ImageResolve2 & operator=( VkImageResolve2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageResolve2 const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ImageResolve2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageResolve2 & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
srcSubresource = srcSubresource_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageResolve2 & setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D const & srcOffset_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
srcOffset = srcOffset_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageResolve2 & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dstSubresource = dstSubresource_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageResolve2 & setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D const & dstOffset_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dstOffset = dstOffset_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageResolve2 & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
extent = extent_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkImageResolve2 const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkImageResolve2 *>( this );
|
|
}
|
|
|
|
operator VkImageResolve2 &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkImageResolve2 *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
const void * const &,
|
|
VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const &,
|
|
VULKAN_HPP_NAMESPACE::Offset3D const &,
|
|
VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const &,
|
|
VULKAN_HPP_NAMESPACE::Offset3D const &,
|
|
VULKAN_HPP_NAMESPACE::Extent3D const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, srcSubresource, srcOffset, dstSubresource, dstOffset, extent );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( ImageResolve2 const & ) const = default;
|
|
#else
|
|
bool operator==( ImageResolve2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcSubresource == rhs.srcSubresource ) && ( srcOffset == rhs.srcOffset ) &&
|
|
( dstSubresource == rhs.dstSubresource ) && ( dstOffset == rhs.dstOffset ) && ( extent == rhs.extent );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( ImageResolve2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageResolve2;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {};
|
|
VULKAN_HPP_NAMESPACE::Offset3D srcOffset = {};
|
|
VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {};
|
|
VULKAN_HPP_NAMESPACE::Offset3D dstOffset = {};
|
|
VULKAN_HPP_NAMESPACE::Extent3D extent = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eImageResolve2>
|
|
{
|
|
using Type = ImageResolve2;
|
|
};
|
|
|
|
using ImageResolve2KHR = ImageResolve2;
|
|
|
|
struct ImageStencilUsageCreateInfo
|
|
{
|
|
using NativeType = VkImageStencilUsageCreateInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageStencilUsageCreateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ImageStencilUsageCreateInfo( VULKAN_HPP_NAMESPACE::ImageUsageFlags stencilUsage_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, stencilUsage( stencilUsage_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR ImageStencilUsageCreateInfo( ImageStencilUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImageStencilUsageCreateInfo( VkImageStencilUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ImageStencilUsageCreateInfo( *reinterpret_cast<ImageStencilUsageCreateInfo const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
ImageStencilUsageCreateInfo & operator=( ImageStencilUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ImageStencilUsageCreateInfo & operator=( VkImageStencilUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageStencilUsageCreateInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ImageStencilUsageCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageStencilUsageCreateInfo & setStencilUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags stencilUsage_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
stencilUsage = stencilUsage_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkImageStencilUsageCreateInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkImageStencilUsageCreateInfo *>( this );
|
|
}
|
|
|
|
operator VkImageStencilUsageCreateInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkImageStencilUsageCreateInfo *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ImageUsageFlags const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, stencilUsage );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( ImageStencilUsageCreateInfo const & ) const = default;
|
|
#else
|
|
bool operator==( ImageStencilUsageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stencilUsage == rhs.stencilUsage );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( ImageStencilUsageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageStencilUsageCreateInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::ImageUsageFlags stencilUsage = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eImageStencilUsageCreateInfo>
|
|
{
|
|
using Type = ImageStencilUsageCreateInfo;
|
|
};
|
|
|
|
using ImageStencilUsageCreateInfoEXT = ImageStencilUsageCreateInfo;
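  // Illustrative usage sketch (not part of the generated registry output): ImageStencilUsageCreateInfo
  // is chained into the pNext of an ImageCreateInfo to give the stencil aspect its own usage flags.
  // `vk` is assumed to be VULKAN_HPP_NAMESPACE.
  //
  //   vk::ImageStencilUsageCreateInfo stencilUsage{ vk::ImageUsageFlagBits::eDepthStencilAttachment };
  //   vk::ImageCreateInfo             imageCreateInfo{};
  //   imageCreateInfo.setPNext( &stencilUsage );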
struct ImageSubresource
|
|
{
|
|
using NativeType = VkImageSubresource;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR
|
|
ImageSubresource( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, uint32_t mipLevel_ = {}, uint32_t arrayLayer_ = {} ) VULKAN_HPP_NOEXCEPT
|
|
: aspectMask( aspectMask_ )
|
|
, mipLevel( mipLevel_ )
|
|
, arrayLayer( arrayLayer_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR ImageSubresource( ImageSubresource const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImageSubresource( VkImageSubresource const & rhs ) VULKAN_HPP_NOEXCEPT : ImageSubresource( *reinterpret_cast<ImageSubresource const *>( &rhs ) ) {}
|
|
|
|
ImageSubresource & operator=( ImageSubresource const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ImageSubresource & operator=( VkImageSubresource const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageSubresource const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ImageSubresource & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
aspectMask = aspectMask_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageSubresource & setMipLevel( uint32_t mipLevel_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
mipLevel = mipLevel_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageSubresource & setArrayLayer( uint32_t arrayLayer_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
arrayLayer = arrayLayer_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkImageSubresource const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkImageSubresource *>( this );
|
|
}
|
|
|
|
operator VkImageSubresource &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkImageSubresource *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::ImageAspectFlags const &, uint32_t const &, uint32_t const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( aspectMask, mipLevel, arrayLayer );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( ImageSubresource const & ) const = default;
|
|
#else
|
|
bool operator==( ImageSubresource const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( aspectMask == rhs.aspectMask ) && ( mipLevel == rhs.mipLevel ) && ( arrayLayer == rhs.arrayLayer );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( ImageSubresource const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {};
|
|
uint32_t mipLevel = {};
|
|
uint32_t arrayLayer = {};
|
|
};
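  // Illustrative usage sketch (not part of the generated registry output): ImageSubresource
  // identifies a single mip level / array layer, e.g. when querying the layout of a
  // linear-tiled image. `device` and `image` are placeholder handles.
  //
  //   vk::ImageSubresource  subresource{ vk::ImageAspectFlagBits::eColor, 0, 0 };
  //   vk::SubresourceLayout layout = device.getImageSubresourceLayout( image, subresource );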
struct ImageSwapchainCreateInfoKHR
|
|
{
|
|
using NativeType = VkImageSwapchainCreateInfoKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageSwapchainCreateInfoKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ImageSwapchainCreateInfoKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, swapchain( swapchain_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR ImageSwapchainCreateInfoKHR( ImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImageSwapchainCreateInfoKHR( VkImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ImageSwapchainCreateInfoKHR( *reinterpret_cast<ImageSwapchainCreateInfoKHR const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
ImageSwapchainCreateInfoKHR & operator=( ImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ImageSwapchainCreateInfoKHR & operator=( VkImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageSwapchainCreateInfoKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ImageSwapchainCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageSwapchainCreateInfoKHR & setSwapchain( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
swapchain = swapchain_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkImageSwapchainCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkImageSwapchainCreateInfoKHR *>( this );
|
|
}
|
|
|
|
operator VkImageSwapchainCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkImageSwapchainCreateInfoKHR *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::SwapchainKHR const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, swapchain );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( ImageSwapchainCreateInfoKHR const & ) const = default;
|
|
#else
|
|
bool operator==( ImageSwapchainCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchain == rhs.swapchain );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( ImageSwapchainCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageSwapchainCreateInfoKHR;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eImageSwapchainCreateInfoKHR>
|
|
{
|
|
using Type = ImageSwapchainCreateInfoKHR;
|
|
};
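  // Illustrative usage sketch (not part of the generated registry output): ImageSwapchainCreateInfoKHR
  // is chained into an ImageCreateInfo to create an image that can later be bound to swapchain
  // memory (VK_KHR_swapchain together with device groups). `swapchain` is a placeholder handle.
  //
  //   vk::ImageSwapchainCreateInfoKHR swapchainInfo{ swapchain };
  //   vk::ImageCreateInfo             imageCreateInfo{};
  //   imageCreateInfo.setPNext( &swapchainInfo );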
struct ImageViewASTCDecodeModeEXT
|
|
{
|
|
using NativeType = VkImageViewASTCDecodeModeEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewAstcDecodeModeEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ImageViewASTCDecodeModeEXT( VULKAN_HPP_NAMESPACE::Format decodeMode_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, decodeMode( decodeMode_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR ImageViewASTCDecodeModeEXT( ImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImageViewASTCDecodeModeEXT( VkImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ImageViewASTCDecodeModeEXT( *reinterpret_cast<ImageViewASTCDecodeModeEXT const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
ImageViewASTCDecodeModeEXT & operator=( ImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ImageViewASTCDecodeModeEXT & operator=( VkImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageViewASTCDecodeModeEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ImageViewASTCDecodeModeEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageViewASTCDecodeModeEXT & setDecodeMode( VULKAN_HPP_NAMESPACE::Format decodeMode_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
decodeMode = decodeMode_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkImageViewASTCDecodeModeEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkImageViewASTCDecodeModeEXT *>( this );
|
|
}
|
|
|
|
operator VkImageViewASTCDecodeModeEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkImageViewASTCDecodeModeEXT *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Format const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, decodeMode );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( ImageViewASTCDecodeModeEXT const & ) const = default;
|
|
#else
|
|
bool operator==( ImageViewASTCDecodeModeEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( decodeMode == rhs.decodeMode );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( ImageViewASTCDecodeModeEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewAstcDecodeModeEXT;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Format decodeMode = VULKAN_HPP_NAMESPACE::Format::eUndefined;
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eImageViewAstcDecodeModeEXT>
|
|
{
|
|
using Type = ImageViewASTCDecodeModeEXT;
|
|
};
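  // Illustrative usage sketch (not part of the generated registry output): ImageViewASTCDecodeModeEXT
  // is chained into an ImageViewCreateInfo to request a lower-precision ASTC decode mode
  // (VK_EXT_astc_decode_mode). `viewCreateInfo` is a placeholder vk::ImageViewCreateInfo being filled in.
  //
  //   vk::ImageViewASTCDecodeModeEXT decodeMode{ vk::Format::eR8G8B8A8Unorm };
  //   viewCreateInfo.setPNext( &decodeMode );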
struct ImageViewCreateInfo
|
|
{
|
|
using NativeType = VkImageViewCreateInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewCreateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ImageViewCreateInfo( VULKAN_HPP_NAMESPACE::ImageViewCreateFlags flags_ = {},
|
|
VULKAN_HPP_NAMESPACE::Image image_ = {},
|
|
VULKAN_HPP_NAMESPACE::ImageViewType viewType_ = VULKAN_HPP_NAMESPACE::ImageViewType::e1D,
|
|
VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
|
|
VULKAN_HPP_NAMESPACE::ComponentMapping components_ = {},
|
|
VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, flags( flags_ )
|
|
, image( image_ )
|
|
, viewType( viewType_ )
|
|
, format( format_ )
|
|
, components( components_ )
|
|
, subresourceRange( subresourceRange_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR ImageViewCreateInfo( ImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImageViewCreateInfo( VkImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : ImageViewCreateInfo( *reinterpret_cast<ImageViewCreateInfo const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
ImageViewCreateInfo & operator=( ImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ImageViewCreateInfo & operator=( VkImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageViewCreateInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::ImageViewCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
image = image_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo & setViewType( VULKAN_HPP_NAMESPACE::ImageViewType viewType_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
viewType = viewType_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
format = format_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo & setComponents( VULKAN_HPP_NAMESPACE::ComponentMapping const & components_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
components = components_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo &
|
|
setSubresourceRange( VULKAN_HPP_NAMESPACE::ImageSubresourceRange const & subresourceRange_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
subresourceRange = subresourceRange_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkImageViewCreateInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkImageViewCreateInfo *>( this );
|
|
}
|
|
|
|
operator VkImageViewCreateInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkImageViewCreateInfo *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
const void * const &,
|
|
VULKAN_HPP_NAMESPACE::ImageViewCreateFlags const &,
|
|
VULKAN_HPP_NAMESPACE::Image const &,
|
|
VULKAN_HPP_NAMESPACE::ImageViewType const &,
|
|
VULKAN_HPP_NAMESPACE::Format const &,
|
|
VULKAN_HPP_NAMESPACE::ComponentMapping const &,
|
|
VULKAN_HPP_NAMESPACE::ImageSubresourceRange const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags, image, viewType, format, components, subresourceRange );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( ImageViewCreateInfo const & ) const = default;
|
|
#else
|
|
bool operator==( ImageViewCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( image == rhs.image ) && ( viewType == rhs.viewType ) &&
|
|
( format == rhs.format ) && ( components == rhs.components ) && ( subresourceRange == rhs.subresourceRange );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( ImageViewCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewCreateInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::ImageViewCreateFlags flags = {};
|
|
VULKAN_HPP_NAMESPACE::Image image = {};
|
|
VULKAN_HPP_NAMESPACE::ImageViewType viewType = VULKAN_HPP_NAMESPACE::ImageViewType::e1D;
|
|
VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
|
|
VULKAN_HPP_NAMESPACE::ComponentMapping components = {};
|
|
VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eImageViewCreateInfo>
|
|
{
|
|
using Type = ImageViewCreateInfo;
|
|
};
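  // Illustrative usage sketch (not part of the generated registry output): creating a 2D color view
  // with Device::createImageView (enhanced mode with exceptions assumed). `device`, `image` and
  // `format` are placeholders.
  //
  //   vk::ImageViewCreateInfo viewCreateInfo{ {},
  //                                           image,
  //                                           vk::ImageViewType::e2D,
  //                                           format,
  //                                           {},
  //                                           { vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 } };
  //   vk::ImageView view = device.createImageView( viewCreateInfo );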
struct ImageViewUsageCreateInfo
|
|
{
|
|
using NativeType = VkImageViewUsageCreateInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewUsageCreateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ImageViewUsageCreateInfo( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, usage( usage_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR ImageViewUsageCreateInfo( ImageViewUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImageViewUsageCreateInfo( VkImageViewUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ImageViewUsageCreateInfo( *reinterpret_cast<ImageViewUsageCreateInfo const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
ImageViewUsageCreateInfo & operator=( ImageViewUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ImageViewUsageCreateInfo & operator=( VkImageViewUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageViewUsageCreateInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ImageViewUsageCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageViewUsageCreateInfo & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
usage = usage_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkImageViewUsageCreateInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkImageViewUsageCreateInfo *>( this );
|
|
}
|
|
|
|
operator VkImageViewUsageCreateInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkImageViewUsageCreateInfo *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ImageUsageFlags const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, usage );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( ImageViewUsageCreateInfo const & ) const = default;
|
|
#else
|
|
bool operator==( ImageViewUsageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( usage == rhs.usage );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( ImageViewUsageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewUsageCreateInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eImageViewUsageCreateInfo>
|
|
{
|
|
using Type = ImageViewUsageCreateInfo;
|
|
};
|
|
|
|
using ImageViewUsageCreateInfoKHR = ImageViewUsageCreateInfo;
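  // Illustrative usage sketch (not part of the generated registry output): ImageViewUsageCreateInfo
  // restricts a view to a subset of the image's usage flags. `viewCreateInfo` is a placeholder
  // vk::ImageViewCreateInfo being filled in.
  //
  //   vk::ImageViewUsageCreateInfo viewUsage{ vk::ImageUsageFlagBits::eSampled };
  //   viewCreateInfo.setPNext( &viewUsage );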
struct ImportFenceFdInfoKHR
|
|
{
|
|
using NativeType = VkImportFenceFdInfoKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportFenceFdInfoKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ImportFenceFdInfoKHR(
|
|
VULKAN_HPP_NAMESPACE::Fence fence_ = {},
|
|
VULKAN_HPP_NAMESPACE::FenceImportFlags flags_ = {},
|
|
VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd,
|
|
int fd_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, fence( fence_ )
|
|
, flags( flags_ )
|
|
, handleType( handleType_ )
|
|
, fd( fd_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR ImportFenceFdInfoKHR( ImportFenceFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImportFenceFdInfoKHR( VkImportFenceFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ImportFenceFdInfoKHR( *reinterpret_cast<ImportFenceFdInfoKHR const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
ImportFenceFdInfoKHR & operator=( ImportFenceFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ImportFenceFdInfoKHR & operator=( VkImportFenceFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ImportFenceFdInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImportFenceFdInfoKHR & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
fence = fence_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImportFenceFdInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::FenceImportFlags flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImportFenceFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
handleType = handleType_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImportFenceFdInfoKHR & setFd( int fd_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
fd = fd_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkImportFenceFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkImportFenceFdInfoKHR *>( this );
|
|
}
|
|
|
|
operator VkImportFenceFdInfoKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkImportFenceFdInfoKHR *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
const void * const &,
|
|
VULKAN_HPP_NAMESPACE::Fence const &,
|
|
VULKAN_HPP_NAMESPACE::FenceImportFlags const &,
|
|
VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits const &,
|
|
int const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, fence, flags, handleType, fd );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( ImportFenceFdInfoKHR const & ) const = default;
|
|
#else
|
|
bool operator==( ImportFenceFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fence == rhs.fence ) && ( flags == rhs.flags ) && ( handleType == rhs.handleType ) &&
|
|
( fd == rhs.fd );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( ImportFenceFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportFenceFdInfoKHR;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Fence fence = {};
|
|
VULKAN_HPP_NAMESPACE::FenceImportFlags flags = {};
|
|
VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd;
|
|
int fd = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eImportFenceFdInfoKHR>
|
|
{
|
|
using Type = ImportFenceFdInfoKHR;
|
|
};
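  // Illustrative usage sketch (not part of the generated registry output): importing a POSIX fd
  // payload into a fence with Device::importFenceFdKHR (VK_KHR_external_fence_fd; enhanced mode
  // with exceptions assumed). `device`, `fence` and `fd` are placeholders.
  //
  //   vk::ImportFenceFdInfoKHR importInfo{ fence, {}, vk::ExternalFenceHandleTypeFlagBits::eOpaqueFd, fd };
  //   device.importFenceFdKHR( importInfo );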
#if defined( VK_USE_PLATFORM_SCI )
|
|
struct ImportFenceSciSyncInfoNV
|
|
{
|
|
using NativeType = VkImportFenceSciSyncInfoNV;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportFenceSciSyncInfoNV;
|
|
|
|
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ImportFenceSciSyncInfoNV(
|
|
VULKAN_HPP_NAMESPACE::Fence fence_ = {},
|
|
VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd,
|
|
void * handle_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, fence( fence_ )
|
|
, handleType( handleType_ )
|
|
, handle( handle_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR ImportFenceSciSyncInfoNV( ImportFenceSciSyncInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImportFenceSciSyncInfoNV( VkImportFenceSciSyncInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ImportFenceSciSyncInfoNV( *reinterpret_cast<ImportFenceSciSyncInfoNV const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
ImportFenceSciSyncInfoNV & operator=( ImportFenceSciSyncInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ImportFenceSciSyncInfoNV & operator=( VkImportFenceSciSyncInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportFenceSciSyncInfoNV const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ImportFenceSciSyncInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImportFenceSciSyncInfoNV & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
fence = fence_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImportFenceSciSyncInfoNV & setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
handleType = handleType_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImportFenceSciSyncInfoNV & setHandle( void * handle_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
handle = handle_;
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkImportFenceSciSyncInfoNV const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkImportFenceSciSyncInfoNV *>( this );
|
|
}
|
|
|
|
operator VkImportFenceSciSyncInfoNV &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkImportFenceSciSyncInfoNV *>( this );
|
|
}
|
|
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
const void * const &,
|
|
VULKAN_HPP_NAMESPACE::Fence const &,
|
|
VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits const &,
|
|
void * const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, fence, handleType, handle );
|
|
}
|
|
# endif
|
|
|
|
# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( ImportFenceSciSyncInfoNV const & ) const = default;
|
|
# else
|
|
bool operator==( ImportFenceSciSyncInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fence == rhs.fence ) && ( handleType == rhs.handleType ) && ( handle == rhs.handle );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( ImportFenceSciSyncInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
# endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportFenceSciSyncInfoNV;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Fence fence = {};
|
|
VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd;
|
|
void * handle = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eImportFenceSciSyncInfoNV>
|
|
{
|
|
using Type = ImportFenceSciSyncInfoNV;
|
|
};
|
|
#endif /*VK_USE_PLATFORM_SCI*/
|
|
|
|
struct ImportMemoryFdInfoKHR
|
|
{
|
|
using NativeType = VkImportMemoryFdInfoKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryFdInfoKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ImportMemoryFdInfoKHR(
|
|
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd,
|
|
int fd_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, handleType( handleType_ )
|
|
, fd( fd_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR ImportMemoryFdInfoKHR( ImportMemoryFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImportMemoryFdInfoKHR( VkImportMemoryFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ImportMemoryFdInfoKHR( *reinterpret_cast<ImportMemoryFdInfoKHR const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
ImportMemoryFdInfoKHR & operator=( ImportMemoryFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ImportMemoryFdInfoKHR & operator=( VkImportMemoryFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportMemoryFdInfoKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ImportMemoryFdInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImportMemoryFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
handleType = handleType_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImportMemoryFdInfoKHR & setFd( int fd_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
fd = fd_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkImportMemoryFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkImportMemoryFdInfoKHR *>( this );
|
|
}
|
|
|
|
operator VkImportMemoryFdInfoKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkImportMemoryFdInfoKHR *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits const &, int const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, handleType, fd );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( ImportMemoryFdInfoKHR const & ) const = default;
|
|
#else
|
|
bool operator==( ImportMemoryFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType ) && ( fd == rhs.fd );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( ImportMemoryFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryFdInfoKHR;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
|
|
int fd = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eImportMemoryFdInfoKHR>
|
|
{
|
|
using Type = ImportMemoryFdInfoKHR;
|
|
};
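  // Illustrative usage sketch (not part of the generated registry output): importing an opaque fd
  // as device memory by chaining ImportMemoryFdInfoKHR into a MemoryAllocateInfo
  // (VK_KHR_external_memory_fd). `device`, `fd`, `size` and `memoryTypeIndex` are placeholders.
  //
  //   vk::ImportMemoryFdInfoKHR importInfo{ vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, fd };
  //   vk::MemoryAllocateInfo    allocInfo{ size, memoryTypeIndex, &importInfo };
  //   vk::DeviceMemory          memory = device.allocateMemory( allocInfo );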
struct ImportMemoryHostPointerInfoEXT
|
|
{
|
|
using NativeType = VkImportMemoryHostPointerInfoEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryHostPointerInfoEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ImportMemoryHostPointerInfoEXT(
|
|
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd,
|
|
void * pHostPointer_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, handleType( handleType_ )
|
|
, pHostPointer( pHostPointer_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR ImportMemoryHostPointerInfoEXT( ImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImportMemoryHostPointerInfoEXT( VkImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ImportMemoryHostPointerInfoEXT( *reinterpret_cast<ImportMemoryHostPointerInfoEXT const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
ImportMemoryHostPointerInfoEXT & operator=( ImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ImportMemoryHostPointerInfoEXT & operator=( VkImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportMemoryHostPointerInfoEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ImportMemoryHostPointerInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImportMemoryHostPointerInfoEXT &
|
|
setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
handleType = handleType_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImportMemoryHostPointerInfoEXT & setPHostPointer( void * pHostPointer_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pHostPointer = pHostPointer_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkImportMemoryHostPointerInfoEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkImportMemoryHostPointerInfoEXT *>( this );
|
|
}
|
|
|
|
operator VkImportMemoryHostPointerInfoEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkImportMemoryHostPointerInfoEXT *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::
|
|
tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits const &, void * const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, handleType, pHostPointer );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( ImportMemoryHostPointerInfoEXT const & ) const = default;
|
|
#else
|
|
bool operator==( ImportMemoryHostPointerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType ) && ( pHostPointer == rhs.pHostPointer );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( ImportMemoryHostPointerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryHostPointerInfoEXT;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
|
|
void * pHostPointer = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eImportMemoryHostPointerInfoEXT>
|
|
{
|
|
using Type = ImportMemoryHostPointerInfoEXT;
|
|
};
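  // Illustrative usage sketch (not part of the generated registry output): importing host memory
  // with VK_EXT_external_memory_host by chaining ImportMemoryHostPointerInfoEXT into a
  // MemoryAllocateInfo. `hostPtr`, `size` and `memoryTypeIndex` are placeholders; the pointer and
  // size must satisfy the alignment requirements of the extension.
  //
  //   vk::ImportMemoryHostPointerInfoEXT importInfo{ vk::ExternalMemoryHandleTypeFlagBits::eHostAllocationEXT, hostPtr };
  //   vk::MemoryAllocateInfo             allocInfo{ size, memoryTypeIndex, &importInfo };
  //   vk::DeviceMemory                   memory = device.allocateMemory( allocInfo );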
#if defined( VK_USE_PLATFORM_SCI )
|
|
struct ImportMemorySciBufInfoNV
|
|
{
|
|
using NativeType = VkImportMemorySciBufInfoNV;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemorySciBufInfoNV;
|
|
|
|
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ImportMemorySciBufInfoNV(
|
|
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd,
|
|
NvSciBufObj handle_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, handleType( handleType_ )
|
|
, handle( handle_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR ImportMemorySciBufInfoNV( ImportMemorySciBufInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImportMemorySciBufInfoNV( VkImportMemorySciBufInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ImportMemorySciBufInfoNV( *reinterpret_cast<ImportMemorySciBufInfoNV const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
ImportMemorySciBufInfoNV & operator=( ImportMemorySciBufInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ImportMemorySciBufInfoNV & operator=( VkImportMemorySciBufInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportMemorySciBufInfoNV const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ImportMemorySciBufInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImportMemorySciBufInfoNV & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
handleType = handleType_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImportMemorySciBufInfoNV & setHandle( NvSciBufObj handle_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
handle = handle_;
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkImportMemorySciBufInfoNV const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkImportMemorySciBufInfoNV *>( this );
|
|
}
|
|
|
|
operator VkImportMemorySciBufInfoNV &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkImportMemorySciBufInfoNV *>( this );
|
|
}
|
|
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
const void * const &,
|
|
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits const &,
|
|
NvSciBufObj const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, handleType, handle );
|
|
}
|
|
# endif
|
|
|
|
# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
std::strong_ordering operator<=>( ImportMemorySciBufInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
|
|
return cmp;
|
|
if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
|
|
return cmp;
|
|
if ( auto cmp = handleType <=> rhs.handleType; cmp != 0 )
|
|
return cmp;
|
|
if ( auto cmp = memcmp( &handle, &rhs.handle, sizeof( NvSciBufObj ) ); cmp != 0 )
|
|
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
|
|
|
|
return std::strong_ordering::equivalent;
|
|
}
|
|
# endif
|
|
|
|
bool operator==( ImportMemorySciBufInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType ) &&
|
|
( memcmp( &handle, &rhs.handle, sizeof( NvSciBufObj ) ) == 0 );
|
|
}
|
|
|
|
bool operator!=( ImportMemorySciBufInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemorySciBufInfoNV;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
|
|
NvSciBufObj handle = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eImportMemorySciBufInfoNV>
|
|
{
|
|
using Type = ImportMemorySciBufInfoNV;
|
|
};
|
|
#endif /*VK_USE_PLATFORM_SCI*/
|
|
|
|
#if defined( VK_USE_PLATFORM_SCREEN_QNX )
|
|
struct ImportScreenBufferInfoQNX
|
|
{
|
|
using NativeType = VkImportScreenBufferInfoQNX;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportScreenBufferInfoQNX;
|
|
|
|
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ImportScreenBufferInfoQNX( struct _screen_buffer * buffer_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, buffer( buffer_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR ImportScreenBufferInfoQNX( ImportScreenBufferInfoQNX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImportScreenBufferInfoQNX( VkImportScreenBufferInfoQNX const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ImportScreenBufferInfoQNX( *reinterpret_cast<ImportScreenBufferInfoQNX const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
ImportScreenBufferInfoQNX & operator=( ImportScreenBufferInfoQNX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ImportScreenBufferInfoQNX & operator=( VkImportScreenBufferInfoQNX const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportScreenBufferInfoQNX const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ImportScreenBufferInfoQNX & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImportScreenBufferInfoQNX & setBuffer( struct _screen_buffer * buffer_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
buffer = buffer_;
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkImportScreenBufferInfoQNX const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkImportScreenBufferInfoQNX *>( this );
|
|
}
|
|
|
|
operator VkImportScreenBufferInfoQNX &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkImportScreenBufferInfoQNX *>( this );
|
|
}
|
|
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, struct _screen_buffer * const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, buffer );
|
|
}
|
|
# endif
|
|
|
|
# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( ImportScreenBufferInfoQNX const & ) const = default;
|
|
# else
|
|
bool operator==( ImportScreenBufferInfoQNX const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( buffer == rhs.buffer );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( ImportScreenBufferInfoQNX const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
# endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportScreenBufferInfoQNX;
|
|
const void * pNext = {};
|
|
struct _screen_buffer * buffer = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eImportScreenBufferInfoQNX>
|
|
{
|
|
using Type = ImportScreenBufferInfoQNX;
|
|
};
|
|
#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
|
|
|
|
struct ImportSemaphoreFdInfoKHR
|
|
{
|
|
using NativeType = VkImportSemaphoreFdInfoKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportSemaphoreFdInfoKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ImportSemaphoreFdInfoKHR(
|
|
VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {},
|
|
VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ = {},
|
|
VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd,
|
|
int fd_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, semaphore( semaphore_ )
|
|
, flags( flags_ )
|
|
, handleType( handleType_ )
|
|
, fd( fd_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR ImportSemaphoreFdInfoKHR( ImportSemaphoreFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImportSemaphoreFdInfoKHR( VkImportSemaphoreFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ImportSemaphoreFdInfoKHR( *reinterpret_cast<ImportSemaphoreFdInfoKHR const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
ImportSemaphoreFdInfoKHR & operator=( ImportSemaphoreFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ImportSemaphoreFdInfoKHR & operator=( VkImportSemaphoreFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreFdInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreFdInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
semaphore = semaphore_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreFdInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreFdInfoKHR &
|
|
setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
handleType = handleType_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreFdInfoKHR & setFd( int fd_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
fd = fd_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkImportSemaphoreFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkImportSemaphoreFdInfoKHR *>( this );
|
|
}
|
|
|
|
operator VkImportSemaphoreFdInfoKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkImportSemaphoreFdInfoKHR *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
const void * const &,
|
|
VULKAN_HPP_NAMESPACE::Semaphore const &,
|
|
VULKAN_HPP_NAMESPACE::SemaphoreImportFlags const &,
|
|
VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits const &,
|
|
int const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, semaphore, flags, handleType, fd );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( ImportSemaphoreFdInfoKHR const & ) const = default;
|
|
#else
|
|
bool operator==( ImportSemaphoreFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) && ( flags == rhs.flags ) && ( handleType == rhs.handleType ) &&
|
|
( fd == rhs.fd );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( ImportSemaphoreFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportSemaphoreFdInfoKHR;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Semaphore semaphore = {};
|
|
VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags = {};
|
|
VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd;
|
|
int fd = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eImportSemaphoreFdInfoKHR>
|
|
{
|
|
using Type = ImportSemaphoreFdInfoKHR;
|
|
};
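  // Illustrative usage sketch (not part of the generated registry output): importing a POSIX fd
  // payload into a semaphore with Device::importSemaphoreFdKHR (VK_KHR_external_semaphore_fd;
  // enhanced mode with exceptions assumed). `device`, `semaphore` and `fd` are placeholders.
  //
  //   vk::ImportSemaphoreFdInfoKHR importInfo{ semaphore, {}, vk::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd, fd };
  //   device.importSemaphoreFdKHR( importInfo );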
#if defined( VK_USE_PLATFORM_SCI )
|
|
struct ImportSemaphoreSciSyncInfoNV
|
|
{
|
|
using NativeType = VkImportSemaphoreSciSyncInfoNV;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportSemaphoreSciSyncInfoNV;
|
|
|
|
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ImportSemaphoreSciSyncInfoNV(
|
|
VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {},
|
|
VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd,
|
|
void * handle_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, semaphore( semaphore_ )
|
|
, handleType( handleType_ )
|
|
, handle( handle_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR ImportSemaphoreSciSyncInfoNV( ImportSemaphoreSciSyncInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImportSemaphoreSciSyncInfoNV( VkImportSemaphoreSciSyncInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ImportSemaphoreSciSyncInfoNV( *reinterpret_cast<ImportSemaphoreSciSyncInfoNV const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
ImportSemaphoreSciSyncInfoNV & operator=( ImportSemaphoreSciSyncInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ImportSemaphoreSciSyncInfoNV & operator=( VkImportSemaphoreSciSyncInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportSemaphoreSciSyncInfoNV const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreSciSyncInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreSciSyncInfoNV & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
semaphore = semaphore_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreSciSyncInfoNV &
|
|
setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
handleType = handleType_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreSciSyncInfoNV & setHandle( void * handle_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
handle = handle_;
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkImportSemaphoreSciSyncInfoNV const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkImportSemaphoreSciSyncInfoNV *>( this );
|
|
}
|
|
|
|
operator VkImportSemaphoreSciSyncInfoNV &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkImportSemaphoreSciSyncInfoNV *>( this );
|
|
}
|
|
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
const void * const &,
|
|
VULKAN_HPP_NAMESPACE::Semaphore const &,
|
|
VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits const &,
|
|
void * const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, semaphore, handleType, handle );
|
|
}
|
|
# endif
|
|
|
|
# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( ImportSemaphoreSciSyncInfoNV const & ) const = default;
|
|
# else
|
|
bool operator==( ImportSemaphoreSciSyncInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) && ( handleType == rhs.handleType ) && ( handle == rhs.handle );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( ImportSemaphoreSciSyncInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
# endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportSemaphoreSciSyncInfoNV;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Semaphore semaphore = {};
|
|
VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd;
|
|
void * handle = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eImportSemaphoreSciSyncInfoNV>
|
|
{
|
|
using Type = ImportSemaphoreSciSyncInfoNV;
|
|
};
|
|
#endif /*VK_USE_PLATFORM_SCI*/
|
|
|
|
  struct InputAttachmentAspectReference
  {
    using NativeType = VkInputAttachmentAspectReference;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR InputAttachmentAspectReference( uint32_t subpass_ = {},
                                                         uint32_t inputAttachmentIndex_ = {},
                                                         VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {} ) VULKAN_HPP_NOEXCEPT
      : subpass( subpass_ )
      , inputAttachmentIndex( inputAttachmentIndex_ )
      , aspectMask( aspectMask_ )
    {
    }

    VULKAN_HPP_CONSTEXPR InputAttachmentAspectReference( InputAttachmentAspectReference const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    InputAttachmentAspectReference( VkInputAttachmentAspectReference const & rhs ) VULKAN_HPP_NOEXCEPT
      : InputAttachmentAspectReference( *reinterpret_cast<InputAttachmentAspectReference const *>( &rhs ) )
    {
    }

    InputAttachmentAspectReference & operator=( InputAttachmentAspectReference const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    InputAttachmentAspectReference & operator=( VkInputAttachmentAspectReference const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 InputAttachmentAspectReference & setSubpass( uint32_t subpass_ ) VULKAN_HPP_NOEXCEPT
    {
      subpass = subpass_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 InputAttachmentAspectReference & setInputAttachmentIndex( uint32_t inputAttachmentIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      inputAttachmentIndex = inputAttachmentIndex_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 InputAttachmentAspectReference & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
    {
      aspectMask = aspectMask_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkInputAttachmentAspectReference const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkInputAttachmentAspectReference *>( this );
    }

    operator VkInputAttachmentAspectReference &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkInputAttachmentAspectReference *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#  if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#  else
    std::tuple<uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ImageAspectFlags const &>
#  endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( subpass, inputAttachmentIndex, aspectMask );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( InputAttachmentAspectReference const & ) const = default;
#else
    bool operator==( InputAttachmentAspectReference const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( subpass == rhs.subpass ) && ( inputAttachmentIndex == rhs.inputAttachmentIndex ) && ( aspectMask == rhs.aspectMask );
#  endif
    }

    bool operator!=( InputAttachmentAspectReference const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    uint32_t subpass = {};
    uint32_t inputAttachmentIndex = {};
    VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {};
  };

  using InputAttachmentAspectReferenceKHR = InputAttachmentAspectReference;

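  // Note: when VULKAN_HPP_DISABLE_ENHANCED_MODE is not defined, the ArrayProxyNoTemporaries-based
  // constructor and the setPEnabledLayerNames / setPEnabledExtensionNames setters of InstanceCreateInfo
  // below derive enabledLayerCount and enabledExtensionCount from the size of the passed proxy, so the
  // count members never have to be filled in by hand.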
  struct InstanceCreateInfo
  {
    using NativeType = VkInstanceCreateInfo;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eInstanceCreateInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR InstanceCreateInfo( VULKAN_HPP_NAMESPACE::InstanceCreateFlags flags_ = {},
                                             const VULKAN_HPP_NAMESPACE::ApplicationInfo * pApplicationInfo_ = {},
                                             uint32_t enabledLayerCount_ = {},
                                             const char * const * ppEnabledLayerNames_ = {},
                                             uint32_t enabledExtensionCount_ = {},
                                             const char * const * ppEnabledExtensionNames_ = {},
                                             const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , flags( flags_ )
      , pApplicationInfo( pApplicationInfo_ )
      , enabledLayerCount( enabledLayerCount_ )
      , ppEnabledLayerNames( ppEnabledLayerNames_ )
      , enabledExtensionCount( enabledExtensionCount_ )
      , ppEnabledExtensionNames( ppEnabledExtensionNames_ )
    {
    }

    VULKAN_HPP_CONSTEXPR InstanceCreateInfo( InstanceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    InstanceCreateInfo( VkInstanceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : InstanceCreateInfo( *reinterpret_cast<InstanceCreateInfo const *>( &rhs ) ) {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    InstanceCreateInfo( VULKAN_HPP_NAMESPACE::InstanceCreateFlags flags_,
                        const VULKAN_HPP_NAMESPACE::ApplicationInfo * pApplicationInfo_,
                        VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char * const> const & pEnabledLayerNames_,
                        VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char * const> const & pEnabledExtensionNames_ = {},
                        const void * pNext_ = nullptr )
      : pNext( pNext_ )
      , flags( flags_ )
      , pApplicationInfo( pApplicationInfo_ )
      , enabledLayerCount( static_cast<uint32_t>( pEnabledLayerNames_.size() ) )
      , ppEnabledLayerNames( pEnabledLayerNames_.data() )
      , enabledExtensionCount( static_cast<uint32_t>( pEnabledExtensionNames_.size() ) )
      , ppEnabledExtensionNames( pEnabledExtensionNames_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    InstanceCreateInfo & operator=( InstanceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    InstanceCreateInfo & operator=( VkInstanceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::InstanceCreateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::InstanceCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo & setPApplicationInfo( const VULKAN_HPP_NAMESPACE::ApplicationInfo * pApplicationInfo_ ) VULKAN_HPP_NOEXCEPT
    {
      pApplicationInfo = pApplicationInfo_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo & setEnabledLayerCount( uint32_t enabledLayerCount_ ) VULKAN_HPP_NOEXCEPT
    {
      enabledLayerCount = enabledLayerCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo & setPpEnabledLayerNames( const char * const * ppEnabledLayerNames_ ) VULKAN_HPP_NOEXCEPT
    {
      ppEnabledLayerNames = ppEnabledLayerNames_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    InstanceCreateInfo &
      setPEnabledLayerNames( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char * const> const & pEnabledLayerNames_ ) VULKAN_HPP_NOEXCEPT
    {
      enabledLayerCount   = static_cast<uint32_t>( pEnabledLayerNames_.size() );
      ppEnabledLayerNames = pEnabledLayerNames_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo & setEnabledExtensionCount( uint32_t enabledExtensionCount_ ) VULKAN_HPP_NOEXCEPT
    {
      enabledExtensionCount = enabledExtensionCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo & setPpEnabledExtensionNames( const char * const * ppEnabledExtensionNames_ ) VULKAN_HPP_NOEXCEPT
    {
      ppEnabledExtensionNames = ppEnabledExtensionNames_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    InstanceCreateInfo &
      setPEnabledExtensionNames( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char * const> const & pEnabledExtensionNames_ ) VULKAN_HPP_NOEXCEPT
    {
      enabledExtensionCount   = static_cast<uint32_t>( pEnabledExtensionNames_.size() );
      ppEnabledExtensionNames = pEnabledExtensionNames_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkInstanceCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkInstanceCreateInfo *>( this );
    }

    operator VkInstanceCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkInstanceCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#  if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#  else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
               const void * const &,
               VULKAN_HPP_NAMESPACE::InstanceCreateFlags const &,
               const VULKAN_HPP_NAMESPACE::ApplicationInfo * const &,
               uint32_t const &,
               const char * const * const &,
               uint32_t const &,
               const char * const * const &>
#  endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, pApplicationInfo, enabledLayerCount, ppEnabledLayerNames, enabledExtensionCount, ppEnabledExtensionNames );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    std::strong_ordering operator<=>( InstanceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
        return cmp;
      if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
        return cmp;
      if ( auto cmp = flags <=> rhs.flags; cmp != 0 )
        return cmp;
      if ( auto cmp = pApplicationInfo <=> rhs.pApplicationInfo; cmp != 0 )
        return cmp;
      if ( auto cmp = enabledLayerCount <=> rhs.enabledLayerCount; cmp != 0 )
        return cmp;
      for ( size_t i = 0; i < enabledLayerCount; ++i )
      {
        if ( ppEnabledLayerNames[i] != rhs.ppEnabledLayerNames[i] )
          if ( auto cmp = strcmp( ppEnabledLayerNames[i], rhs.ppEnabledLayerNames[i] ); cmp != 0 )
            return cmp < 0 ? std::strong_ordering::less : std::strong_ordering::greater;
      }
      if ( auto cmp = enabledExtensionCount <=> rhs.enabledExtensionCount; cmp != 0 )
        return cmp;
      for ( size_t i = 0; i < enabledExtensionCount; ++i )
      {
        if ( ppEnabledExtensionNames[i] != rhs.ppEnabledExtensionNames[i] )
          if ( auto cmp = strcmp( ppEnabledExtensionNames[i], rhs.ppEnabledExtensionNames[i] ); cmp != 0 )
            return cmp < 0 ? std::strong_ordering::less : std::strong_ordering::greater;
      }

      return std::strong_ordering::equivalent;
    }
#endif

    bool operator==( InstanceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( pApplicationInfo == rhs.pApplicationInfo ) &&
             ( enabledLayerCount == rhs.enabledLayerCount ) &&
             std::equal( ppEnabledLayerNames,
                         ppEnabledLayerNames + enabledLayerCount,
                         rhs.ppEnabledLayerNames,
                         []( char const * left, char const * right ) { return ( left == right ) || ( strcmp( left, right ) == 0 ); } ) &&
             ( enabledExtensionCount == rhs.enabledExtensionCount ) &&
             std::equal( ppEnabledExtensionNames,
                         ppEnabledExtensionNames + enabledExtensionCount,
                         rhs.ppEnabledExtensionNames,
                         []( char const * left, char const * right ) { return ( left == right ) || ( strcmp( left, right ) == 0 ); } );
    }

    bool operator!=( InstanceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eInstanceCreateInfo;
    const void * pNext = {};
    VULKAN_HPP_NAMESPACE::InstanceCreateFlags flags = {};
    const VULKAN_HPP_NAMESPACE::ApplicationInfo * pApplicationInfo = {};
    uint32_t enabledLayerCount = {};
    const char * const * ppEnabledLayerNames = {};
    uint32_t enabledExtensionCount = {};
    const char * const * ppEnabledExtensionNames = {};
  };

  template <>
  struct CppType<StructureType, StructureType::eInstanceCreateInfo>
  {
    using Type = InstanceCreateInfo;
  };

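  // Note: the std::string-based LayerProperties constructor below asserts that the passed strings fit
  // into the fixed-size members and copies at most VK_MAX_EXTENSION_NAME_SIZE respectively
  // VK_MAX_DESCRIPTION_SIZE characters, so the zero-initialized layerName and description arrays stay
  // null-terminated.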
  struct LayerProperties
  {
    using NativeType = VkLayerProperties;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 LayerProperties( std::array<char, VK_MAX_EXTENSION_NAME_SIZE> const & layerName_ = {},
                                             uint32_t specVersion_ = {},
                                             uint32_t implementationVersion_ = {},
                                             std::array<char, VK_MAX_DESCRIPTION_SIZE> const & description_ = {} ) VULKAN_HPP_NOEXCEPT
      : layerName( layerName_ )
      , specVersion( specVersion_ )
      , implementationVersion( implementationVersion_ )
      , description( description_ )
    {
    }

    VULKAN_HPP_CONSTEXPR_14 LayerProperties( LayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    LayerProperties( VkLayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT : LayerProperties( *reinterpret_cast<LayerProperties const *>( &rhs ) ) {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    LayerProperties( std::string const & layerName_, uint32_t specVersion_ = {}, uint32_t implementationVersion_ = {}, std::string const & description_ = {} )
      : specVersion( specVersion_ ), implementationVersion( implementationVersion_ )
    {
      VULKAN_HPP_ASSERT( layerName_.size() < VK_MAX_EXTENSION_NAME_SIZE );
#    if defined( WIN32 )
      strncpy_s( layerName, VK_MAX_EXTENSION_NAME_SIZE, layerName_.data(), layerName_.size() );
#    else
      strncpy( layerName, layerName_.data(), std::min<size_t>( VK_MAX_EXTENSION_NAME_SIZE, layerName_.size() ) );
#    endif

      VULKAN_HPP_ASSERT( description_.size() < VK_MAX_DESCRIPTION_SIZE );
#    if defined( WIN32 )
      strncpy_s( description, VK_MAX_DESCRIPTION_SIZE, description_.data(), description_.size() );
#    else
      strncpy( description, description_.data(), std::min<size_t>( VK_MAX_DESCRIPTION_SIZE, description_.size() ) );
#    endif
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    LayerProperties & operator=( LayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    LayerProperties & operator=( VkLayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::LayerProperties const *>( &rhs );
      return *this;
    }

    operator VkLayerProperties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkLayerProperties *>( this );
    }

    operator VkLayerProperties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkLayerProperties *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#  if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#  else
    std::tuple<VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> const &,
               uint32_t const &,
               uint32_t const &,
               VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> const &>
#  endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( layerName, specVersion, implementationVersion, description );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    std::strong_ordering operator<=>( LayerProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      if ( auto cmp = strcmp( layerName, rhs.layerName ); cmp != 0 )
        return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
      if ( auto cmp = specVersion <=> rhs.specVersion; cmp != 0 )
        return cmp;
      if ( auto cmp = implementationVersion <=> rhs.implementationVersion; cmp != 0 )
        return cmp;
      if ( auto cmp = strcmp( description, rhs.description ); cmp != 0 )
        return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;

      return std::strong_ordering::equivalent;
    }
#endif

    bool operator==( LayerProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( strcmp( layerName, rhs.layerName ) == 0 ) && ( specVersion == rhs.specVersion ) && ( implementationVersion == rhs.implementationVersion ) &&
             ( strcmp( description, rhs.description ) == 0 );
    }

    bool operator!=( LayerProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }

  public:
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> layerName = {};
    uint32_t specVersion = {};
    uint32_t implementationVersion = {};
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> description = {};
  };

  struct MappedMemoryRange
  {
    using NativeType = VkMappedMemoryRange;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMappedMemoryRange;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR MappedMemoryRange( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {},
                                            VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {},
                                            VULKAN_HPP_NAMESPACE::DeviceSize size_ = {},
                                            const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , memory( memory_ )
      , offset( offset_ )
      , size( size_ )
    {
    }

    VULKAN_HPP_CONSTEXPR MappedMemoryRange( MappedMemoryRange const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MappedMemoryRange( VkMappedMemoryRange const & rhs ) VULKAN_HPP_NOEXCEPT : MappedMemoryRange( *reinterpret_cast<MappedMemoryRange const *>( &rhs ) ) {}

    MappedMemoryRange & operator=( MappedMemoryRange const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    MappedMemoryRange & operator=( VkMappedMemoryRange const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MappedMemoryRange const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 MappedMemoryRange & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MappedMemoryRange & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
    {
      memory = memory_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MappedMemoryRange & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
    {
      offset = offset_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MappedMemoryRange & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
    {
      size = size_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkMappedMemoryRange const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkMappedMemoryRange *>( this );
    }

    operator VkMappedMemoryRange &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkMappedMemoryRange *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#  if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#  else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
               const void * const &,
               VULKAN_HPP_NAMESPACE::DeviceMemory const &,
               VULKAN_HPP_NAMESPACE::DeviceSize const &,
               VULKAN_HPP_NAMESPACE::DeviceSize const &>
#  endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, memory, offset, size );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( MappedMemoryRange const & ) const = default;
#else
    bool operator==( MappedMemoryRange const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory ) && ( offset == rhs.offset ) && ( size == rhs.size );
#  endif
    }

    bool operator!=( MappedMemoryRange const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMappedMemoryRange;
    const void * pNext = {};
    VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
    VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
    VULKAN_HPP_NAMESPACE::DeviceSize size = {};
  };

  template <>
  struct CppType<StructureType, StructureType::eMappedMemoryRange>
  {
    using Type = MappedMemoryRange;
  };

  struct MemoryAllocateFlagsInfo
  {
    using NativeType = VkMemoryAllocateFlagsInfo;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryAllocateFlagsInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR MemoryAllocateFlagsInfo( VULKAN_HPP_NAMESPACE::MemoryAllocateFlags flags_ = {},
                                                  uint32_t deviceMask_ = {},
                                                  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , flags( flags_ )
      , deviceMask( deviceMask_ )
    {
    }

    VULKAN_HPP_CONSTEXPR MemoryAllocateFlagsInfo( MemoryAllocateFlagsInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MemoryAllocateFlagsInfo( VkMemoryAllocateFlagsInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : MemoryAllocateFlagsInfo( *reinterpret_cast<MemoryAllocateFlagsInfo const *>( &rhs ) )
    {
    }

    MemoryAllocateFlagsInfo & operator=( MemoryAllocateFlagsInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    MemoryAllocateFlagsInfo & operator=( VkMemoryAllocateFlagsInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryAllocateFlagsInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 MemoryAllocateFlagsInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MemoryAllocateFlagsInfo & setFlags( VULKAN_HPP_NAMESPACE::MemoryAllocateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MemoryAllocateFlagsInfo & setDeviceMask( uint32_t deviceMask_ ) VULKAN_HPP_NOEXCEPT
    {
      deviceMask = deviceMask_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkMemoryAllocateFlagsInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkMemoryAllocateFlagsInfo *>( this );
    }

    operator VkMemoryAllocateFlagsInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkMemoryAllocateFlagsInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#  if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#  else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::MemoryAllocateFlags const &, uint32_t const &>
#  endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, deviceMask );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( MemoryAllocateFlagsInfo const & ) const = default;
#else
    bool operator==( MemoryAllocateFlagsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( deviceMask == rhs.deviceMask );
#  endif
    }

    bool operator!=( MemoryAllocateFlagsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryAllocateFlagsInfo;
    const void * pNext = {};
    VULKAN_HPP_NAMESPACE::MemoryAllocateFlags flags = {};
    uint32_t deviceMask = {};
  };

  template <>
  struct CppType<StructureType, StructureType::eMemoryAllocateFlagsInfo>
  {
    using Type = MemoryAllocateFlagsInfo;
  };

  using MemoryAllocateFlagsInfoKHR = MemoryAllocateFlagsInfo;

struct MemoryAllocateInfo
|
|
{
|
|
using NativeType = VkMemoryAllocateInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryAllocateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR MemoryAllocateInfo( VULKAN_HPP_NAMESPACE::DeviceSize allocationSize_ = {},
|
|
uint32_t memoryTypeIndex_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, allocationSize( allocationSize_ )
|
|
, memoryTypeIndex( memoryTypeIndex_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR MemoryAllocateInfo( MemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
MemoryAllocateInfo( VkMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryAllocateInfo( *reinterpret_cast<MemoryAllocateInfo const *>( &rhs ) ) {}
|
|
|
|
MemoryAllocateInfo & operator=( MemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
MemoryAllocateInfo & operator=( VkMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryAllocateInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 MemoryAllocateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 MemoryAllocateInfo & setAllocationSize( VULKAN_HPP_NAMESPACE::DeviceSize allocationSize_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
allocationSize = allocationSize_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 MemoryAllocateInfo & setMemoryTypeIndex( uint32_t memoryTypeIndex_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
memoryTypeIndex = memoryTypeIndex_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkMemoryAllocateInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkMemoryAllocateInfo *>( this );
|
|
}
|
|
|
|
operator VkMemoryAllocateInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkMemoryAllocateInfo *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, uint32_t const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, allocationSize, memoryTypeIndex );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( MemoryAllocateInfo const & ) const = default;
|
|
#else
|
|
bool operator==( MemoryAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( allocationSize == rhs.allocationSize ) && ( memoryTypeIndex == rhs.memoryTypeIndex );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( MemoryAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryAllocateInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize allocationSize = {};
|
|
uint32_t memoryTypeIndex = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eMemoryAllocateInfo>
|
|
{
|
|
using Type = MemoryAllocateInfo;
|
|
};
|
|
|
|
struct MemoryBarrier
|
|
{
|
|
using NativeType = VkMemoryBarrier;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryBarrier;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR MemoryBarrier( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {},
|
|
VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, srcAccessMask( srcAccessMask_ )
|
|
, dstAccessMask( dstAccessMask_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR MemoryBarrier( MemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
MemoryBarrier( VkMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryBarrier( *reinterpret_cast<MemoryBarrier const *>( &rhs ) ) {}
|
|
|
|
MemoryBarrier & operator=( MemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
MemoryBarrier & operator=( VkMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryBarrier const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 MemoryBarrier & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 MemoryBarrier & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
srcAccessMask = srcAccessMask_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 MemoryBarrier & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dstAccessMask = dstAccessMask_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkMemoryBarrier const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkMemoryBarrier *>( this );
|
|
}
|
|
|
|
operator VkMemoryBarrier &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkMemoryBarrier *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
const void * const &,
|
|
VULKAN_HPP_NAMESPACE::AccessFlags const &,
|
|
VULKAN_HPP_NAMESPACE::AccessFlags const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, srcAccessMask, dstAccessMask );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( MemoryBarrier const & ) const = default;
|
|
#else
|
|
bool operator==( MemoryBarrier const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcAccessMask == rhs.srcAccessMask ) && ( dstAccessMask == rhs.dstAccessMask );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( MemoryBarrier const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryBarrier;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {};
|
|
VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eMemoryBarrier>
|
|
{
|
|
using Type = MemoryBarrier;
|
|
};
|
|
|
|
struct MemoryDedicatedAllocateInfo
|
|
{
|
|
using NativeType = VkMemoryDedicatedAllocateInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryDedicatedAllocateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR MemoryDedicatedAllocateInfo( VULKAN_HPP_NAMESPACE::Image image_ = {},
|
|
VULKAN_HPP_NAMESPACE::Buffer buffer_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, image( image_ )
|
|
, buffer( buffer_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR MemoryDedicatedAllocateInfo( MemoryDedicatedAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
MemoryDedicatedAllocateInfo( VkMemoryDedicatedAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: MemoryDedicatedAllocateInfo( *reinterpret_cast<MemoryDedicatedAllocateInfo const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
MemoryDedicatedAllocateInfo & operator=( MemoryDedicatedAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
MemoryDedicatedAllocateInfo & operator=( VkMemoryDedicatedAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryDedicatedAllocateInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 MemoryDedicatedAllocateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 MemoryDedicatedAllocateInfo & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
image = image_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 MemoryDedicatedAllocateInfo & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
buffer = buffer_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkMemoryDedicatedAllocateInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkMemoryDedicatedAllocateInfo *>( this );
|
|
}
|
|
|
|
operator VkMemoryDedicatedAllocateInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkMemoryDedicatedAllocateInfo *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Image const &, VULKAN_HPP_NAMESPACE::Buffer const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, image, buffer );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( MemoryDedicatedAllocateInfo const & ) const = default;
|
|
#else
|
|
bool operator==( MemoryDedicatedAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( image == rhs.image ) && ( buffer == rhs.buffer );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( MemoryDedicatedAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryDedicatedAllocateInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Image image = {};
|
|
VULKAN_HPP_NAMESPACE::Buffer buffer = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eMemoryDedicatedAllocateInfo>
|
|
{
|
|
using Type = MemoryDedicatedAllocateInfo;
|
|
};
|
|
|
|
using MemoryDedicatedAllocateInfoKHR = MemoryDedicatedAllocateInfo;
|
|
|
|
struct MemoryDedicatedRequirements
|
|
{
|
|
using NativeType = VkMemoryDedicatedRequirements;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryDedicatedRequirements;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR MemoryDedicatedRequirements( VULKAN_HPP_NAMESPACE::Bool32 prefersDedicatedAllocation_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 requiresDedicatedAllocation_ = {},
|
|
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, prefersDedicatedAllocation( prefersDedicatedAllocation_ )
|
|
, requiresDedicatedAllocation( requiresDedicatedAllocation_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR MemoryDedicatedRequirements( MemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
MemoryDedicatedRequirements( VkMemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: MemoryDedicatedRequirements( *reinterpret_cast<MemoryDedicatedRequirements const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
MemoryDedicatedRequirements & operator=( MemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
MemoryDedicatedRequirements & operator=( VkMemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryDedicatedRequirements const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
operator VkMemoryDedicatedRequirements const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkMemoryDedicatedRequirements *>( this );
|
|
}
|
|
|
|
operator VkMemoryDedicatedRequirements &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkMemoryDedicatedRequirements *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, prefersDedicatedAllocation, requiresDedicatedAllocation );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( MemoryDedicatedRequirements const & ) const = default;
|
|
#else
|
|
bool operator==( MemoryDedicatedRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( prefersDedicatedAllocation == rhs.prefersDedicatedAllocation ) &&
|
|
( requiresDedicatedAllocation == rhs.requiresDedicatedAllocation );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( MemoryDedicatedRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryDedicatedRequirements;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 prefersDedicatedAllocation = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 requiresDedicatedAllocation = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eMemoryDedicatedRequirements>
|
|
{
|
|
using Type = MemoryDedicatedRequirements;
|
|
};
|
|
|
|
using MemoryDedicatedRequirementsKHR = MemoryDedicatedRequirements;
|
|
|
|
struct MemoryFdPropertiesKHR
|
|
{
|
|
using NativeType = VkMemoryFdPropertiesKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryFdPropertiesKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR MemoryFdPropertiesKHR( uint32_t memoryTypeBits_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, memoryTypeBits( memoryTypeBits_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR MemoryFdPropertiesKHR( MemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
MemoryFdPropertiesKHR( VkMemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: MemoryFdPropertiesKHR( *reinterpret_cast<MemoryFdPropertiesKHR const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
MemoryFdPropertiesKHR & operator=( MemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
MemoryFdPropertiesKHR & operator=( VkMemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
operator VkMemoryFdPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkMemoryFdPropertiesKHR *>( this );
|
|
}
|
|
|
|
operator VkMemoryFdPropertiesKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkMemoryFdPropertiesKHR *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, memoryTypeBits );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( MemoryFdPropertiesKHR const & ) const = default;
|
|
#else
|
|
bool operator==( MemoryFdPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryTypeBits == rhs.memoryTypeBits );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( MemoryFdPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryFdPropertiesKHR;
|
|
void * pNext = {};
|
|
uint32_t memoryTypeBits = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eMemoryFdPropertiesKHR>
|
|
{
|
|
using Type = MemoryFdPropertiesKHR;
|
|
};
|
|
|
|
struct MemoryGetFdInfoKHR
|
|
{
|
|
using NativeType = VkMemoryGetFdInfoKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryGetFdInfoKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR MemoryGetFdInfoKHR(
|
|
VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {},
|
|
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd,
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, memory( memory_ )
|
|
, handleType( handleType_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR MemoryGetFdInfoKHR( MemoryGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
MemoryGetFdInfoKHR( VkMemoryGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryGetFdInfoKHR( *reinterpret_cast<MemoryGetFdInfoKHR const *>( &rhs ) ) {}
|
|
|
|
MemoryGetFdInfoKHR & operator=( MemoryGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
MemoryGetFdInfoKHR & operator=( VkMemoryGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 MemoryGetFdInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 MemoryGetFdInfoKHR & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
memory = memory_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 MemoryGetFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
handleType = handleType_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkMemoryGetFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkMemoryGetFdInfoKHR *>( this );
|
|
}
|
|
|
|
operator VkMemoryGetFdInfoKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkMemoryGetFdInfoKHR *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
const void * const &,
|
|
VULKAN_HPP_NAMESPACE::DeviceMemory const &,
|
|
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, memory, handleType );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( MemoryGetFdInfoKHR const & ) const = default;
|
|
#else
|
|
bool operator==( MemoryGetFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory ) && ( handleType == rhs.handleType );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( MemoryGetFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetFdInfoKHR;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
|
|
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eMemoryGetFdInfoKHR>
|
|
{
|
|
using Type = MemoryGetFdInfoKHR;
|
|
};
|
|
|
|
#if defined( VK_USE_PLATFORM_SCI )
|
|
struct MemoryGetSciBufInfoNV
|
|
{
|
|
using NativeType = VkMemoryGetSciBufInfoNV;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryGetSciBufInfoNV;
|
|
|
|
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR MemoryGetSciBufInfoNV(
|
|
VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {},
|
|
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd,
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, memory( memory_ )
|
|
, handleType( handleType_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR MemoryGetSciBufInfoNV( MemoryGetSciBufInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
MemoryGetSciBufInfoNV( VkMemoryGetSciBufInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: MemoryGetSciBufInfoNV( *reinterpret_cast<MemoryGetSciBufInfoNV const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
MemoryGetSciBufInfoNV & operator=( MemoryGetSciBufInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
MemoryGetSciBufInfoNV & operator=( VkMemoryGetSciBufInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryGetSciBufInfoNV const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 MemoryGetSciBufInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 MemoryGetSciBufInfoNV & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
memory = memory_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 MemoryGetSciBufInfoNV & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
handleType = handleType_;
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkMemoryGetSciBufInfoNV const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkMemoryGetSciBufInfoNV *>( this );
|
|
}
|
|
|
|
operator VkMemoryGetSciBufInfoNV &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkMemoryGetSciBufInfoNV *>( this );
|
|
}
|
|
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
const void * const &,
|
|
VULKAN_HPP_NAMESPACE::DeviceMemory const &,
|
|
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, memory, handleType );
|
|
}
|
|
# endif
|
|
|
|
# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( MemoryGetSciBufInfoNV const & ) const = default;
|
|
# else
|
|
bool operator==( MemoryGetSciBufInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory ) && ( handleType == rhs.handleType );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( MemoryGetSciBufInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
# endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetSciBufInfoNV;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
|
|
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eMemoryGetSciBufInfoNV>
|
|
{
|
|
using Type = MemoryGetSciBufInfoNV;
|
|
};
|
|
#endif /*VK_USE_PLATFORM_SCI*/
|
|
|
|
struct MemoryHeap
|
|
{
|
|
using NativeType = VkMemoryHeap;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR MemoryHeap( VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, VULKAN_HPP_NAMESPACE::MemoryHeapFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT
|
|
: size( size_ )
|
|
, flags( flags_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR MemoryHeap( MemoryHeap const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
MemoryHeap( VkMemoryHeap const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryHeap( *reinterpret_cast<MemoryHeap const *>( &rhs ) ) {}
|
|
|
|
MemoryHeap & operator=( MemoryHeap const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
MemoryHeap & operator=( VkMemoryHeap const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryHeap const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
operator VkMemoryHeap const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkMemoryHeap *>( this );
|
|
}
|
|
|
|
operator VkMemoryHeap &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkMemoryHeap *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::MemoryHeapFlags const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( size, flags );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( MemoryHeap const & ) const = default;
|
|
#else
|
|
bool operator==( MemoryHeap const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( size == rhs.size ) && ( flags == rhs.flags );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( MemoryHeap const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::DeviceSize size = {};
|
|
VULKAN_HPP_NAMESPACE::MemoryHeapFlags flags = {};
|
|
};
|
|
|
|
struct MemoryHostPointerPropertiesEXT
|
|
{
|
|
using NativeType = VkMemoryHostPointerPropertiesEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryHostPointerPropertiesEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR MemoryHostPointerPropertiesEXT( uint32_t memoryTypeBits_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, memoryTypeBits( memoryTypeBits_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR MemoryHostPointerPropertiesEXT( MemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
MemoryHostPointerPropertiesEXT( VkMemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: MemoryHostPointerPropertiesEXT( *reinterpret_cast<MemoryHostPointerPropertiesEXT const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
MemoryHostPointerPropertiesEXT & operator=( MemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
MemoryHostPointerPropertiesEXT & operator=( VkMemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
operator VkMemoryHostPointerPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkMemoryHostPointerPropertiesEXT *>( this );
|
|
}
|
|
|
|
operator VkMemoryHostPointerPropertiesEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkMemoryHostPointerPropertiesEXT *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, memoryTypeBits );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( MemoryHostPointerPropertiesEXT const & ) const = default;
|
|
#else
|
|
bool operator==( MemoryHostPointerPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryTypeBits == rhs.memoryTypeBits );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( MemoryHostPointerPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryHostPointerPropertiesEXT;
|
|
void * pNext = {};
|
|
uint32_t memoryTypeBits = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eMemoryHostPointerPropertiesEXT>
|
|
{
|
|
using Type = MemoryHostPointerPropertiesEXT;
|
|
};
|
|
|
|
struct MemoryOpaqueCaptureAddressAllocateInfo
|
|
{
|
|
using NativeType = VkMemoryOpaqueCaptureAddressAllocateInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryOpaqueCaptureAddressAllocateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR MemoryOpaqueCaptureAddressAllocateInfo( uint64_t opaqueCaptureAddress_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, opaqueCaptureAddress( opaqueCaptureAddress_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR MemoryOpaqueCaptureAddressAllocateInfo( MemoryOpaqueCaptureAddressAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
MemoryOpaqueCaptureAddressAllocateInfo( VkMemoryOpaqueCaptureAddressAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: MemoryOpaqueCaptureAddressAllocateInfo( *reinterpret_cast<MemoryOpaqueCaptureAddressAllocateInfo const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
MemoryOpaqueCaptureAddressAllocateInfo & operator=( MemoryOpaqueCaptureAddressAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
MemoryOpaqueCaptureAddressAllocateInfo & operator=( VkMemoryOpaqueCaptureAddressAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryOpaqueCaptureAddressAllocateInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 MemoryOpaqueCaptureAddressAllocateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 MemoryOpaqueCaptureAddressAllocateInfo & setOpaqueCaptureAddress( uint64_t opaqueCaptureAddress_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
opaqueCaptureAddress = opaqueCaptureAddress_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkMemoryOpaqueCaptureAddressAllocateInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkMemoryOpaqueCaptureAddressAllocateInfo *>( this );
|
|
}
|
|
|
|
operator VkMemoryOpaqueCaptureAddressAllocateInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkMemoryOpaqueCaptureAddressAllocateInfo *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint64_t const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, opaqueCaptureAddress );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( MemoryOpaqueCaptureAddressAllocateInfo const & ) const = default;
|
|
#else
|
|
bool operator==( MemoryOpaqueCaptureAddressAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( opaqueCaptureAddress == rhs.opaqueCaptureAddress );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( MemoryOpaqueCaptureAddressAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryOpaqueCaptureAddressAllocateInfo;
|
|
const void * pNext = {};
|
|
uint64_t opaqueCaptureAddress = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eMemoryOpaqueCaptureAddressAllocateInfo>
|
|
{
|
|
using Type = MemoryOpaqueCaptureAddressAllocateInfo;
|
|
};
|
|
|
|
using MemoryOpaqueCaptureAddressAllocateInfoKHR = MemoryOpaqueCaptureAddressAllocateInfo;
|
|
|
|
  struct MemoryRequirements
  {
    using NativeType = VkMemoryRequirements;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR MemoryRequirements( VULKAN_HPP_NAMESPACE::DeviceSize size_      = {},
                                             VULKAN_HPP_NAMESPACE::DeviceSize alignment_ = {},
                                             uint32_t                         memoryTypeBits_ = {} ) VULKAN_HPP_NOEXCEPT
      : size( size_ )
      , alignment( alignment_ )
      , memoryTypeBits( memoryTypeBits_ )
    {
    }

    VULKAN_HPP_CONSTEXPR MemoryRequirements( MemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MemoryRequirements( VkMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryRequirements( *reinterpret_cast<MemoryRequirements const *>( &rhs ) ) {}

    MemoryRequirements & operator=( MemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    MemoryRequirements & operator=( VkMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryRequirements const *>( &rhs );
      return *this;
    }

    operator VkMemoryRequirements const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkMemoryRequirements *>( this );
    }

    operator VkMemoryRequirements &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkMemoryRequirements *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, uint32_t const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( size, alignment, memoryTypeBits );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( MemoryRequirements const & ) const = default;
#else
    bool operator==( MemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
# if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
# else
      return ( size == rhs.size ) && ( alignment == rhs.alignment ) && ( memoryTypeBits == rhs.memoryTypeBits );
# endif
    }

    bool operator!=( MemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::DeviceSize size           = {};
    VULKAN_HPP_NAMESPACE::DeviceSize alignment      = {};
    uint32_t                         memoryTypeBits = {};
  };
  struct MemoryRequirements2
  {
    using NativeType = VkMemoryRequirements2;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryRequirements2;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR MemoryRequirements2( VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , memoryRequirements( memoryRequirements_ )
    {
    }

    VULKAN_HPP_CONSTEXPR MemoryRequirements2( MemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MemoryRequirements2( VkMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryRequirements2( *reinterpret_cast<MemoryRequirements2 const *>( &rhs ) )
    {
    }

    MemoryRequirements2 & operator=( MemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    MemoryRequirements2 & operator=( VkMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryRequirements2 const *>( &rhs );
      return *this;
    }

    operator VkMemoryRequirements2 const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkMemoryRequirements2 *>( this );
    }

    operator VkMemoryRequirements2 &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkMemoryRequirements2 *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::MemoryRequirements const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, memoryRequirements );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( MemoryRequirements2 const & ) const = default;
#else
    bool operator==( MemoryRequirements2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
# if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
# else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryRequirements == rhs.memoryRequirements );
# endif
    }

    bool operator!=( MemoryRequirements2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType      sType              = StructureType::eMemoryRequirements2;
    void *                                   pNext              = {};
    VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements = {};
  };

  template <>
  struct CppType<StructureType, StructureType::eMemoryRequirements2>
  {
    using Type = MemoryRequirements2;
  };

  using MemoryRequirements2KHR = MemoryRequirements2;
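
  // Usage sketch (comment only): MemoryRequirements2 is normally filled by one of the
  // *MemoryRequirements2 queries rather than constructed by hand. A valid `device` and `buffer`
  // and the default exception-throwing configuration are assumed here.
  //
  //   VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements =
  //     device.getBufferMemoryRequirements2( VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2( buffer ) );
  //   VULKAN_HPP_NAMESPACE::DeviceSize requiredSize = memoryRequirements.memoryRequirements.size;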
|
|
|
|
#if defined( VK_USE_PLATFORM_SCI )
|
|
struct MemorySciBufPropertiesNV
|
|
{
|
|
using NativeType = VkMemorySciBufPropertiesNV;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemorySciBufPropertiesNV;
|
|
|
|
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR MemorySciBufPropertiesNV( uint32_t memoryTypeBits_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, memoryTypeBits( memoryTypeBits_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR MemorySciBufPropertiesNV( MemorySciBufPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
MemorySciBufPropertiesNV( VkMemorySciBufPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: MemorySciBufPropertiesNV( *reinterpret_cast<MemorySciBufPropertiesNV const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
MemorySciBufPropertiesNV & operator=( MemorySciBufPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
MemorySciBufPropertiesNV & operator=( VkMemorySciBufPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemorySciBufPropertiesNV const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 MemorySciBufPropertiesNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 MemorySciBufPropertiesNV & setMemoryTypeBits( uint32_t memoryTypeBits_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
memoryTypeBits = memoryTypeBits_;
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkMemorySciBufPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkMemorySciBufPropertiesNV *>( this );
|
|
}
|
|
|
|
operator VkMemorySciBufPropertiesNV &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkMemorySciBufPropertiesNV *>( this );
|
|
}
|
|
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, memoryTypeBits );
|
|
}
|
|
# endif
|
|
|
|
# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( MemorySciBufPropertiesNV const & ) const = default;
|
|
# else
|
|
bool operator==( MemorySciBufPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryTypeBits == rhs.memoryTypeBits );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( MemorySciBufPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
# endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemorySciBufPropertiesNV;
|
|
const void * pNext = {};
|
|
uint32_t memoryTypeBits = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eMemorySciBufPropertiesNV>
|
|
{
|
|
using Type = MemorySciBufPropertiesNV;
|
|
};
|
|
#endif /*VK_USE_PLATFORM_SCI*/
|
|
|
|
struct MemoryType
|
|
{
|
|
using NativeType = VkMemoryType;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR MemoryType( VULKAN_HPP_NAMESPACE::MemoryPropertyFlags propertyFlags_ = {}, uint32_t heapIndex_ = {} ) VULKAN_HPP_NOEXCEPT
|
|
: propertyFlags( propertyFlags_ )
|
|
, heapIndex( heapIndex_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR MemoryType( MemoryType const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
MemoryType( VkMemoryType const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryType( *reinterpret_cast<MemoryType const *>( &rhs ) ) {}
|
|
|
|
MemoryType & operator=( MemoryType const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
MemoryType & operator=( VkMemoryType const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryType const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
operator VkMemoryType const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkMemoryType *>( this );
|
|
}
|
|
|
|
operator VkMemoryType &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkMemoryType *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::MemoryPropertyFlags const &, uint32_t const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( propertyFlags, heapIndex );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( MemoryType const & ) const = default;
|
|
#else
|
|
bool operator==( MemoryType const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( propertyFlags == rhs.propertyFlags ) && ( heapIndex == rhs.heapIndex );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( MemoryType const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::MemoryPropertyFlags propertyFlags = {};
|
|
uint32_t heapIndex = {};
|
|
};
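
  // Usage sketch (comment only): MemoryType is read from PhysicalDeviceMemoryProperties when
  // picking a memory type index for an allocation. `physicalDevice` and `memoryRequirements` are
  // assumed to exist; the property flag tested here is just an example.
  //
  //   VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties memoryProperties = physicalDevice.getMemoryProperties();
  //   uint32_t memoryTypeIndex = ~0u;
  //   for ( uint32_t i = 0; i < memoryProperties.memoryTypeCount; ++i )
  //   {
  //     if ( ( memoryRequirements.memoryTypeBits & ( 1u << i ) ) &&
  //          ( memoryProperties.memoryTypes[i].propertyFlags & VULKAN_HPP_NAMESPACE::MemoryPropertyFlagBits::eHostVisible ) )
  //     {
  //       memoryTypeIndex = i;
  //       break;
  //     }
  //   }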
|
|
|
|
struct MultisamplePropertiesEXT
|
|
{
|
|
using NativeType = VkMultisamplePropertiesEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMultisamplePropertiesEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR MultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, maxSampleLocationGridSize( maxSampleLocationGridSize_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR MultisamplePropertiesEXT( MultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
MultisamplePropertiesEXT( VkMultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: MultisamplePropertiesEXT( *reinterpret_cast<MultisamplePropertiesEXT const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
MultisamplePropertiesEXT & operator=( MultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
MultisamplePropertiesEXT & operator=( VkMultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
operator VkMultisamplePropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkMultisamplePropertiesEXT *>( this );
|
|
}
|
|
|
|
operator VkMultisamplePropertiesEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkMultisamplePropertiesEXT *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Extent2D const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, maxSampleLocationGridSize );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( MultisamplePropertiesEXT const & ) const = default;
|
|
#else
|
|
bool operator==( MultisamplePropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxSampleLocationGridSize == rhs.maxSampleLocationGridSize );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( MultisamplePropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMultisamplePropertiesEXT;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eMultisamplePropertiesEXT>
|
|
{
|
|
using Type = MultisamplePropertiesEXT;
|
|
};
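
  // Usage sketch (comment only): MultisamplePropertiesEXT is returned by
  // PhysicalDevice::getMultisamplePropertiesEXT from VK_EXT_sample_locations; the extension must
  // be enabled, and a valid `physicalDevice` is assumed.
  //
  //   VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT multisampleProperties =
  //     physicalDevice.getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e4 );
  //   VULKAN_HPP_NAMESPACE::Extent2D gridSize = multisampleProperties.maxSampleLocationGridSize;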
|
|
|
|
struct PerformanceCounterDescriptionKHR
|
|
{
|
|
using NativeType = VkPerformanceCounterDescriptionKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceCounterDescriptionKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR_14 PerformanceCounterDescriptionKHR( VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionFlagsKHR flags_ = {},
|
|
std::array<char, VK_MAX_DESCRIPTION_SIZE> const & name_ = {},
|
|
std::array<char, VK_MAX_DESCRIPTION_SIZE> const & category_ = {},
|
|
std::array<char, VK_MAX_DESCRIPTION_SIZE> const & description_ = {},
|
|
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, flags( flags_ )
|
|
, name( name_ )
|
|
, category( category_ )
|
|
, description( description_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PerformanceCounterDescriptionKHR( PerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PerformanceCounterDescriptionKHR( VkPerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PerformanceCounterDescriptionKHR( *reinterpret_cast<PerformanceCounterDescriptionKHR const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
PerformanceCounterDescriptionKHR( VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionFlagsKHR flags_,
|
|
std::string const & name_,
|
|
std::string const & category_ = {},
|
|
std::string const & description_ = {},
|
|
void * pNext_ = nullptr )
|
|
: pNext( pNext_ ), flags( flags_ )
|
|
{
|
|
VULKAN_HPP_ASSERT( name_.size() < VK_MAX_DESCRIPTION_SIZE );
|
|
# if defined( WIN32 )
|
|
strncpy_s( name, VK_MAX_DESCRIPTION_SIZE, name_.data(), name_.size() );
|
|
# else
|
|
strncpy( name, name_.data(), std::min<size_t>( VK_MAX_DESCRIPTION_SIZE, name_.size() ) );
|
|
# endif
|
|
|
|
VULKAN_HPP_ASSERT( category_.size() < VK_MAX_DESCRIPTION_SIZE );
|
|
# if defined( WIN32 )
|
|
strncpy_s( category, VK_MAX_DESCRIPTION_SIZE, category_.data(), category_.size() );
|
|
# else
|
|
strncpy( category, category_.data(), std::min<size_t>( VK_MAX_DESCRIPTION_SIZE, category_.size() ) );
|
|
# endif
|
|
|
|
VULKAN_HPP_ASSERT( description_.size() < VK_MAX_DESCRIPTION_SIZE );
|
|
# if defined( WIN32 )
|
|
strncpy_s( description, VK_MAX_DESCRIPTION_SIZE, description_.data(), description_.size() );
|
|
# else
|
|
strncpy( description, description_.data(), std::min<size_t>( VK_MAX_DESCRIPTION_SIZE, description_.size() ) );
|
|
# endif
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
PerformanceCounterDescriptionKHR & operator=( PerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PerformanceCounterDescriptionKHR & operator=( VkPerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
operator VkPerformanceCounterDescriptionKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPerformanceCounterDescriptionKHR *>( this );
|
|
}
|
|
|
|
operator VkPerformanceCounterDescriptionKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPerformanceCounterDescriptionKHR *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
void * const &,
|
|
VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionFlagsKHR const &,
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> const &,
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> const &,
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags, name, category, description );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
std::strong_ordering operator<=>( PerformanceCounterDescriptionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
|
|
return cmp;
|
|
if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
|
|
return cmp;
|
|
if ( auto cmp = flags <=> rhs.flags; cmp != 0 )
|
|
return cmp;
|
|
if ( auto cmp = strcmp( name, rhs.name ); cmp != 0 )
|
|
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
|
|
if ( auto cmp = strcmp( category, rhs.category ); cmp != 0 )
|
|
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
|
|
if ( auto cmp = strcmp( description, rhs.description ); cmp != 0 )
|
|
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
|
|
|
|
return std::strong_ordering::equivalent;
|
|
}
|
|
#endif
|
|
|
|
bool operator==( PerformanceCounterDescriptionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( strcmp( name, rhs.name ) == 0 ) &&
|
|
( strcmp( category, rhs.category ) == 0 ) && ( strcmp( description, rhs.description ) == 0 );
|
|
}
|
|
|
|
bool operator!=( PerformanceCounterDescriptionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceCounterDescriptionKHR;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionFlagsKHR flags = {};
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> name = {};
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> category = {};
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> description = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePerformanceCounterDescriptionKHR>
|
|
{
|
|
using Type = PerformanceCounterDescriptionKHR;
|
|
};
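
  // Usage sketch (comment only): the fixed-size char arrays in PerformanceCounterDescriptionKHR
  // are wrapped in ArrayWrapper1D, which converts to std::string; `description` is assumed to
  // have been filled in by the VK_KHR_performance_query counter enumeration.
  //
  //   std::string counterName     = description.name;
  //   std::string counterCategory = description.category;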
|
|
|
|
struct PerformanceCounterKHR
|
|
{
|
|
using NativeType = VkPerformanceCounterKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceCounterKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR_14
|
|
PerformanceCounterKHR( VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR unit_ = VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR::eGeneric,
|
|
VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR scope_ = VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR::eCommandBuffer,
|
|
VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR storage_ = VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR::eInt32,
|
|
std::array<uint8_t, VK_UUID_SIZE> const & uuid_ = {},
|
|
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, unit( unit_ )
|
|
, scope( scope_ )
|
|
, storage( storage_ )
|
|
, uuid( uuid_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PerformanceCounterKHR( PerformanceCounterKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PerformanceCounterKHR( VkPerformanceCounterKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PerformanceCounterKHR( *reinterpret_cast<PerformanceCounterKHR const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
PerformanceCounterKHR & operator=( PerformanceCounterKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PerformanceCounterKHR & operator=( VkPerformanceCounterKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
operator VkPerformanceCounterKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPerformanceCounterKHR *>( this );
|
|
}
|
|
|
|
operator VkPerformanceCounterKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPerformanceCounterKHR *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
void * const &,
|
|
VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR const &,
|
|
VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR const &,
|
|
VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR const &,
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, unit, scope, storage, uuid );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( PerformanceCounterKHR const & ) const = default;
|
|
#else
|
|
bool operator==( PerformanceCounterKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( unit == rhs.unit ) && ( scope == rhs.scope ) && ( storage == rhs.storage ) &&
|
|
( uuid == rhs.uuid );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( PerformanceCounterKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceCounterKHR;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR unit = VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR::eGeneric;
|
|
VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR scope = VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR::eCommandBuffer;
|
|
VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR storage = VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR::eInt32;
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> uuid = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePerformanceCounterKHR>
|
|
{
|
|
using Type = PerformanceCounterKHR;
|
|
};
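
  // Usage sketch (comment only, hedged): with VK_KHR_performance_query enabled, the counters
  // available on a queue family can be enumerated. This assumes the enhanced-mode wrapper that
  // returns the counters and their descriptions together as a pair of vectors, plus a valid
  // `physicalDevice` and `queueFamilyIndex`.
  //
  //   auto [counters, counterDescriptions] =
  //     physicalDevice.enumerateQueueFamilyPerformanceQueryCountersKHR( queueFamilyIndex );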
|
|
|
|
union PerformanceCounterResultKHR
|
|
{
|
|
using NativeType = VkPerformanceCounterResultKHR;
|
|
#if !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS )
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR( int32_t int32_ = {} ) : int32( int32_ ) {}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR( int64_t int64_ ) : int64( int64_ ) {}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR( uint32_t uint32_ ) : uint32( uint32_ ) {}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR( uint64_t uint64_ ) : uint64( uint64_ ) {}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR( float float32_ ) : float32( float32_ ) {}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR( double float64_ ) : float64( float64_ ) {}
|
|
#endif /*VULKAN_HPP_NO_UNION_CONSTRUCTORS*/
|
|
|
|
#if !defined( VULKAN_HPP_NO_UNION_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR & setInt32( int32_t int32_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
int32 = int32_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR & setInt64( int64_t int64_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
int64 = int64_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR & setUint32( uint32_t uint32_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
uint32 = uint32_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR & setUint64( uint64_t uint64_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
uint64 = uint64_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR & setFloat32( float float32_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
float32 = float32_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR & setFloat64( double float64_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
float64 = float64_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_UNION_SETTERS*/
|
|
|
|
operator VkPerformanceCounterResultKHR const &() const
|
|
{
|
|
return *reinterpret_cast<const VkPerformanceCounterResultKHR *>( this );
|
|
}
|
|
|
|
operator VkPerformanceCounterResultKHR &()
|
|
{
|
|
return *reinterpret_cast<VkPerformanceCounterResultKHR *>( this );
|
|
}
|
|
|
|
int32_t int32;
|
|
int64_t int64;
|
|
uint32_t uint32;
|
|
uint64_t uint64;
|
|
float float32;
|
|
double float64;
|
|
};
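
  // Usage sketch (comment only): a PerformanceCounterResultKHR must be read through the member
  // matching the counter's PerformanceCounterStorageKHR; `counter` and `result` are assumed to
  // describe the same counter slot.
  //
  //   double value = 0.0;
  //   switch ( counter.storage )
  //   {
  //     case VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR::eInt32: value = result.int32; break;
  //     case VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR::eInt64: value = static_cast<double>( result.int64 ); break;
  //     case VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR::eUint32: value = result.uint32; break;
  //     case VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR::eUint64: value = static_cast<double>( result.uint64 ); break;
  //     case VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR::eFloat32: value = result.float32; break;
  //     case VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR::eFloat64: value = result.float64; break;
  //     default: break;
  //   }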
|
|
|
|
struct PerformanceQueryReservationInfoKHR
|
|
{
|
|
using NativeType = VkPerformanceQueryReservationInfoKHR;
|
|
|
|
static const bool allowDuplicate = true;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceQueryReservationInfoKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PerformanceQueryReservationInfoKHR( uint32_t maxPerformanceQueriesPerPool_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, maxPerformanceQueriesPerPool( maxPerformanceQueriesPerPool_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR PerformanceQueryReservationInfoKHR( PerformanceQueryReservationInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PerformanceQueryReservationInfoKHR( VkPerformanceQueryReservationInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PerformanceQueryReservationInfoKHR( *reinterpret_cast<PerformanceQueryReservationInfoKHR const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
PerformanceQueryReservationInfoKHR & operator=( PerformanceQueryReservationInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PerformanceQueryReservationInfoKHR & operator=( VkPerformanceQueryReservationInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceQueryReservationInfoKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PerformanceQueryReservationInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PerformanceQueryReservationInfoKHR & setMaxPerformanceQueriesPerPool( uint32_t maxPerformanceQueriesPerPool_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
maxPerformanceQueriesPerPool = maxPerformanceQueriesPerPool_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkPerformanceQueryReservationInfoKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPerformanceQueryReservationInfoKHR *>( this );
|
|
}
|
|
|
|
operator VkPerformanceQueryReservationInfoKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPerformanceQueryReservationInfoKHR *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, maxPerformanceQueriesPerPool );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( PerformanceQueryReservationInfoKHR const & ) const = default;
|
|
#else
|
|
bool operator==( PerformanceQueryReservationInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxPerformanceQueriesPerPool == rhs.maxPerformanceQueriesPerPool );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( PerformanceQueryReservationInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceQueryReservationInfoKHR;
|
|
const void * pNext = {};
|
|
uint32_t maxPerformanceQueriesPerPool = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePerformanceQueryReservationInfoKHR>
|
|
{
|
|
using Type = PerformanceQueryReservationInfoKHR;
|
|
};
|
|
|
|
struct PerformanceQuerySubmitInfoKHR
|
|
{
|
|
using NativeType = VkPerformanceQuerySubmitInfoKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceQuerySubmitInfoKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PerformanceQuerySubmitInfoKHR( uint32_t counterPassIndex_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, counterPassIndex( counterPassIndex_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR PerformanceQuerySubmitInfoKHR( PerformanceQuerySubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PerformanceQuerySubmitInfoKHR( VkPerformanceQuerySubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PerformanceQuerySubmitInfoKHR( *reinterpret_cast<PerformanceQuerySubmitInfoKHR const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
PerformanceQuerySubmitInfoKHR & operator=( PerformanceQuerySubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PerformanceQuerySubmitInfoKHR & operator=( VkPerformanceQuerySubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceQuerySubmitInfoKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PerformanceQuerySubmitInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PerformanceQuerySubmitInfoKHR & setCounterPassIndex( uint32_t counterPassIndex_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
counterPassIndex = counterPassIndex_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkPerformanceQuerySubmitInfoKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPerformanceQuerySubmitInfoKHR *>( this );
|
|
}
|
|
|
|
operator VkPerformanceQuerySubmitInfoKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPerformanceQuerySubmitInfoKHR *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, counterPassIndex );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( PerformanceQuerySubmitInfoKHR const & ) const = default;
|
|
#else
|
|
bool operator==( PerformanceQuerySubmitInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( counterPassIndex == rhs.counterPassIndex );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( PerformanceQuerySubmitInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceQuerySubmitInfoKHR;
|
|
const void * pNext = {};
|
|
uint32_t counterPassIndex = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePerformanceQuerySubmitInfoKHR>
|
|
{
|
|
using Type = PerformanceQuerySubmitInfoKHR;
|
|
};
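
  // Usage sketch (comment only): when a performance query pool needs several passes, the pass
  // index is selected per submission by chaining this struct into the SubmitInfo. The names
  // `commandBuffer`, `queue`, `fence` and `counterPassIndex` are assumptions.
  //
  //   VULKAN_HPP_NAMESPACE::PerformanceQuerySubmitInfoKHR performanceSubmitInfo( counterPassIndex );
  //   VULKAN_HPP_NAMESPACE::SubmitInfo                    submitInfo;
  //   submitInfo.setCommandBufferCount( 1 ).setPCommandBuffers( &commandBuffer ).setPNext( &performanceSubmitInfo );
  //   queue.submit( submitInfo, fence );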
|
|
|
|
struct PhysicalDevice16BitStorageFeatures
|
|
{
|
|
using NativeType = VkPhysicalDevice16BitStorageFeatures;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevice16BitStorageFeatures;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDevice16BitStorageFeatures( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_ = {},
|
|
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, storageBuffer16BitAccess( storageBuffer16BitAccess_ )
|
|
, uniformAndStorageBuffer16BitAccess( uniformAndStorageBuffer16BitAccess_ )
|
|
, storagePushConstant16( storagePushConstant16_ )
|
|
, storageInputOutput16( storageInputOutput16_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDevice16BitStorageFeatures( PhysicalDevice16BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDevice16BitStorageFeatures( VkPhysicalDevice16BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDevice16BitStorageFeatures( *reinterpret_cast<PhysicalDevice16BitStorageFeatures const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
PhysicalDevice16BitStorageFeatures & operator=( PhysicalDevice16BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDevice16BitStorageFeatures & operator=( VkPhysicalDevice16BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevice16BitStorageFeatures const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDevice16BitStorageFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDevice16BitStorageFeatures &
|
|
setStorageBuffer16BitAccess( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
storageBuffer16BitAccess = storageBuffer16BitAccess_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDevice16BitStorageFeatures &
|
|
setUniformAndStorageBuffer16BitAccess( VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
uniformAndStorageBuffer16BitAccess = uniformAndStorageBuffer16BitAccess_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDevice16BitStorageFeatures &
|
|
setStoragePushConstant16( VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
storagePushConstant16 = storagePushConstant16_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDevice16BitStorageFeatures &
|
|
setStorageInputOutput16( VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
storageInputOutput16 = storageInputOutput16_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkPhysicalDevice16BitStorageFeatures const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDevice16BitStorageFeatures *>( this );
|
|
}
|
|
|
|
operator VkPhysicalDevice16BitStorageFeatures &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDevice16BitStorageFeatures *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
void * const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, storageBuffer16BitAccess, uniformAndStorageBuffer16BitAccess, storagePushConstant16, storageInputOutput16 );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( PhysicalDevice16BitStorageFeatures const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDevice16BitStorageFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( storageBuffer16BitAccess == rhs.storageBuffer16BitAccess ) &&
|
|
( uniformAndStorageBuffer16BitAccess == rhs.uniformAndStorageBuffer16BitAccess ) && ( storagePushConstant16 == rhs.storagePushConstant16 ) &&
|
|
( storageInputOutput16 == rhs.storageInputOutput16 );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDevice16BitStorageFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevice16BitStorageFeatures;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16 = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16 = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDevice16BitStorageFeatures>
|
|
{
|
|
using Type = PhysicalDevice16BitStorageFeatures;
|
|
};
|
|
|
|
using PhysicalDevice16BitStorageFeaturesKHR = PhysicalDevice16BitStorageFeatures;
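
  // Usage sketch (comment only): like the other PhysicalDevice*Features structs, this one is
  // typically queried through a pNext chain on PhysicalDeviceFeatures2 (Vulkan 1.1 or
  // VK_KHR_get_physical_device_properties2); a valid `physicalDevice` is assumed.
  //
  //   VULKAN_HPP_NAMESPACE::StructureChain<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2,
  //                                        VULKAN_HPP_NAMESPACE::PhysicalDevice16BitStorageFeatures> featureChain;
  //   physicalDevice.getFeatures2( &featureChain.get<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2>() );
  //   bool storageBuffer16BitAccess =
  //     !!featureChain.get<VULKAN_HPP_NAMESPACE::PhysicalDevice16BitStorageFeatures>().storageBuffer16BitAccess;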
|
|
|
|
struct PhysicalDevice4444FormatsFeaturesEXT
|
|
{
|
|
using NativeType = VkPhysicalDevice4444FormatsFeaturesEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevice4444FormatsFeaturesEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDevice4444FormatsFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 formatA4R4G4B4_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 formatA4B4G4R4_ = {},
|
|
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, formatA4R4G4B4( formatA4R4G4B4_ )
|
|
, formatA4B4G4R4( formatA4B4G4R4_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDevice4444FormatsFeaturesEXT( PhysicalDevice4444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDevice4444FormatsFeaturesEXT( VkPhysicalDevice4444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDevice4444FormatsFeaturesEXT( *reinterpret_cast<PhysicalDevice4444FormatsFeaturesEXT const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
PhysicalDevice4444FormatsFeaturesEXT & operator=( PhysicalDevice4444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDevice4444FormatsFeaturesEXT & operator=( VkPhysicalDevice4444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevice4444FormatsFeaturesEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDevice4444FormatsFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDevice4444FormatsFeaturesEXT & setFormatA4R4G4B4( VULKAN_HPP_NAMESPACE::Bool32 formatA4R4G4B4_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
formatA4R4G4B4 = formatA4R4G4B4_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDevice4444FormatsFeaturesEXT & setFormatA4B4G4R4( VULKAN_HPP_NAMESPACE::Bool32 formatA4B4G4R4_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
formatA4B4G4R4 = formatA4B4G4R4_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkPhysicalDevice4444FormatsFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDevice4444FormatsFeaturesEXT *>( this );
|
|
}
|
|
|
|
operator VkPhysicalDevice4444FormatsFeaturesEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDevice4444FormatsFeaturesEXT *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, formatA4R4G4B4, formatA4B4G4R4 );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( PhysicalDevice4444FormatsFeaturesEXT const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDevice4444FormatsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( formatA4R4G4B4 == rhs.formatA4R4G4B4 ) && ( formatA4B4G4R4 == rhs.formatA4B4G4R4 );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDevice4444FormatsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevice4444FormatsFeaturesEXT;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 formatA4R4G4B4 = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 formatA4B4G4R4 = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDevice4444FormatsFeaturesEXT>
|
|
{
|
|
using Type = PhysicalDevice4444FormatsFeaturesEXT;
|
|
};
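
  // Usage sketch (comment only): feature structs such as this one are also used to enable
  // features at device creation by chaining them into DeviceCreateInfo. `physicalDevice` and
  // `queueCreateInfo` are assumptions, and the enhanced-mode setters are assumed to be available.
  //
  //   VULKAN_HPP_NAMESPACE::PhysicalDevice4444FormatsFeaturesEXT formats4444Features( VK_TRUE /* formatA4R4G4B4 */ );
  //   VULKAN_HPP_NAMESPACE::DeviceCreateInfo                     deviceCreateInfo;
  //   deviceCreateInfo.setQueueCreateInfos( queueCreateInfo ).setPNext( &formats4444Features );
  //   VULKAN_HPP_NAMESPACE::Device device = physicalDevice.createDevice( deviceCreateInfo );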
|
|
|
|
struct PhysicalDevice8BitStorageFeatures
|
|
{
|
|
using NativeType = VkPhysicalDevice8BitStorageFeatures;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevice8BitStorageFeatures;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDevice8BitStorageFeatures( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_ = {},
|
|
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, storageBuffer8BitAccess( storageBuffer8BitAccess_ )
|
|
, uniformAndStorageBuffer8BitAccess( uniformAndStorageBuffer8BitAccess_ )
|
|
, storagePushConstant8( storagePushConstant8_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDevice8BitStorageFeatures( PhysicalDevice8BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDevice8BitStorageFeatures( VkPhysicalDevice8BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDevice8BitStorageFeatures( *reinterpret_cast<PhysicalDevice8BitStorageFeatures const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
PhysicalDevice8BitStorageFeatures & operator=( PhysicalDevice8BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDevice8BitStorageFeatures & operator=( VkPhysicalDevice8BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevice8BitStorageFeatures const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDevice8BitStorageFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDevice8BitStorageFeatures &
|
|
setStorageBuffer8BitAccess( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
storageBuffer8BitAccess = storageBuffer8BitAccess_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDevice8BitStorageFeatures &
|
|
setUniformAndStorageBuffer8BitAccess( VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
uniformAndStorageBuffer8BitAccess = uniformAndStorageBuffer8BitAccess_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDevice8BitStorageFeatures &
|
|
setStoragePushConstant8( VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
storagePushConstant8 = storagePushConstant8_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkPhysicalDevice8BitStorageFeatures const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDevice8BitStorageFeatures *>( this );
|
|
}
|
|
|
|
operator VkPhysicalDevice8BitStorageFeatures &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDevice8BitStorageFeatures *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
void * const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, storageBuffer8BitAccess, uniformAndStorageBuffer8BitAccess, storagePushConstant8 );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( PhysicalDevice8BitStorageFeatures const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDevice8BitStorageFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( storageBuffer8BitAccess == rhs.storageBuffer8BitAccess ) &&
|
|
( uniformAndStorageBuffer8BitAccess == rhs.uniformAndStorageBuffer8BitAccess ) && ( storagePushConstant8 == rhs.storagePushConstant8 );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDevice8BitStorageFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevice8BitStorageFeatures;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8 = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDevice8BitStorageFeatures>
|
|
{
|
|
using Type = PhysicalDevice8BitStorageFeatures;
|
|
};
|
|
|
|
using PhysicalDevice8BitStorageFeaturesKHR = PhysicalDevice8BitStorageFeatures;
|
|
|
|
struct PhysicalDeviceASTCDecodeFeaturesEXT
|
|
{
|
|
using NativeType = VkPhysicalDeviceASTCDecodeFeaturesEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceAstcDecodeFeaturesEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceASTCDecodeFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 decodeModeSharedExponent_ = {},
|
|
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, decodeModeSharedExponent( decodeModeSharedExponent_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceASTCDecodeFeaturesEXT( PhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceASTCDecodeFeaturesEXT( VkPhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceASTCDecodeFeaturesEXT( *reinterpret_cast<PhysicalDeviceASTCDecodeFeaturesEXT const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
PhysicalDeviceASTCDecodeFeaturesEXT & operator=( PhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceASTCDecodeFeaturesEXT & operator=( VkPhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceASTCDecodeFeaturesEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceASTCDecodeFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceASTCDecodeFeaturesEXT &
|
|
setDecodeModeSharedExponent( VULKAN_HPP_NAMESPACE::Bool32 decodeModeSharedExponent_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
decodeModeSharedExponent = decodeModeSharedExponent_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkPhysicalDeviceASTCDecodeFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceASTCDecodeFeaturesEXT *>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceASTCDecodeFeaturesEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceASTCDecodeFeaturesEXT *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, decodeModeSharedExponent );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( PhysicalDeviceASTCDecodeFeaturesEXT const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( decodeModeSharedExponent == rhs.decodeModeSharedExponent );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceAstcDecodeFeaturesEXT;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 decodeModeSharedExponent = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceAstcDecodeFeaturesEXT>
|
|
{
|
|
using Type = PhysicalDeviceASTCDecodeFeaturesEXT;
|
|
};
|
|
|
|
struct PhysicalDeviceBlendOperationAdvancedFeaturesEXT
|
|
{
|
|
using NativeType = VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceBlendOperationAdvancedFeaturesEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceBlendOperationAdvancedFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCoherentOperations_ = {},
|
|
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, advancedBlendCoherentOperations( advancedBlendCoherentOperations_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR
|
|
PhysicalDeviceBlendOperationAdvancedFeaturesEXT( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceBlendOperationAdvancedFeaturesEXT( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceBlendOperationAdvancedFeaturesEXT( *reinterpret_cast<PhysicalDeviceBlendOperationAdvancedFeaturesEXT const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
PhysicalDeviceBlendOperationAdvancedFeaturesEXT & operator=( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceBlendOperationAdvancedFeaturesEXT & operator=( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedFeaturesEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBlendOperationAdvancedFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBlendOperationAdvancedFeaturesEXT &
|
|
setAdvancedBlendCoherentOperations( VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCoherentOperations_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
advancedBlendCoherentOperations = advancedBlendCoherentOperations_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT *>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, advancedBlendCoherentOperations );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( advancedBlendCoherentOperations == rhs.advancedBlendCoherentOperations );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBlendOperationAdvancedFeaturesEXT;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCoherentOperations = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceBlendOperationAdvancedFeaturesEXT>
|
|
{
|
|
using Type = PhysicalDeviceBlendOperationAdvancedFeaturesEXT;
|
|
};
|
|
|
|
struct PhysicalDeviceBlendOperationAdvancedPropertiesEXT
|
|
{
|
|
using NativeType = VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceBlendOperationAdvancedPropertiesEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceBlendOperationAdvancedPropertiesEXT( uint32_t advancedBlendMaxColorAttachments_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 advancedBlendIndependentBlend_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedSrcColor_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedDstColor_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCorrelatedOverlap_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 advancedBlendAllOperations_ = {},
|
|
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, advancedBlendMaxColorAttachments( advancedBlendMaxColorAttachments_ )
|
|
, advancedBlendIndependentBlend( advancedBlendIndependentBlend_ )
|
|
, advancedBlendNonPremultipliedSrcColor( advancedBlendNonPremultipliedSrcColor_ )
|
|
, advancedBlendNonPremultipliedDstColor( advancedBlendNonPremultipliedDstColor_ )
|
|
, advancedBlendCorrelatedOverlap( advancedBlendCorrelatedOverlap_ )
|
|
, advancedBlendAllOperations( advancedBlendAllOperations_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR
|
|
PhysicalDeviceBlendOperationAdvancedPropertiesEXT( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceBlendOperationAdvancedPropertiesEXT( VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceBlendOperationAdvancedPropertiesEXT( *reinterpret_cast<PhysicalDeviceBlendOperationAdvancedPropertiesEXT const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
PhysicalDeviceBlendOperationAdvancedPropertiesEXT &
|
|
operator=( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceBlendOperationAdvancedPropertiesEXT & operator=( VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedPropertiesEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
operator VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT *>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
void * const &,
|
|
uint32_t const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType,
|
|
pNext,
|
|
advancedBlendMaxColorAttachments,
|
|
advancedBlendIndependentBlend,
|
|
advancedBlendNonPremultipliedSrcColor,
|
|
advancedBlendNonPremultipliedDstColor,
|
|
advancedBlendCorrelatedOverlap,
|
|
advancedBlendAllOperations );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( advancedBlendMaxColorAttachments == rhs.advancedBlendMaxColorAttachments ) &&
|
|
( advancedBlendIndependentBlend == rhs.advancedBlendIndependentBlend ) &&
|
|
( advancedBlendNonPremultipliedSrcColor == rhs.advancedBlendNonPremultipliedSrcColor ) &&
|
|
( advancedBlendNonPremultipliedDstColor == rhs.advancedBlendNonPremultipliedDstColor ) &&
|
|
( advancedBlendCorrelatedOverlap == rhs.advancedBlendCorrelatedOverlap ) && ( advancedBlendAllOperations == rhs.advancedBlendAllOperations );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBlendOperationAdvancedPropertiesEXT;
|
|
void * pNext = {};
|
|
uint32_t advancedBlendMaxColorAttachments = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 advancedBlendIndependentBlend = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedSrcColor = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedDstColor = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCorrelatedOverlap = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 advancedBlendAllOperations = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceBlendOperationAdvancedPropertiesEXT>
|
|
{
|
|
using Type = PhysicalDeviceBlendOperationAdvancedPropertiesEXT;
|
|
};
|
|
|
|
struct PhysicalDeviceBufferDeviceAddressFeatures
|
|
{
|
|
using NativeType = VkPhysicalDeviceBufferDeviceAddressFeatures;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceBufferDeviceAddressFeatures;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeatures( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ = {},
|
|
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, bufferDeviceAddress( bufferDeviceAddress_ )
|
|
, bufferDeviceAddressCaptureReplay( bufferDeviceAddressCaptureReplay_ )
|
|
, bufferDeviceAddressMultiDevice( bufferDeviceAddressMultiDevice_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeatures( PhysicalDeviceBufferDeviceAddressFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceBufferDeviceAddressFeatures( VkPhysicalDeviceBufferDeviceAddressFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceBufferDeviceAddressFeatures( *reinterpret_cast<PhysicalDeviceBufferDeviceAddressFeatures const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
PhysicalDeviceBufferDeviceAddressFeatures & operator=( PhysicalDeviceBufferDeviceAddressFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceBufferDeviceAddressFeatures & operator=( VkPhysicalDeviceBufferDeviceAddressFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeatures const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeatures &
|
|
setBufferDeviceAddress( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
bufferDeviceAddress = bufferDeviceAddress_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeatures &
|
|
setBufferDeviceAddressCaptureReplay( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
bufferDeviceAddressCaptureReplay = bufferDeviceAddressCaptureReplay_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeatures &
|
|
setBufferDeviceAddressMultiDevice( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
bufferDeviceAddressMultiDevice = bufferDeviceAddressMultiDevice_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkPhysicalDeviceBufferDeviceAddressFeatures const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceBufferDeviceAddressFeatures *>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceBufferDeviceAddressFeatures &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceBufferDeviceAddressFeatures *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
void * const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, bufferDeviceAddress, bufferDeviceAddressCaptureReplay, bufferDeviceAddressMultiDevice );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( PhysicalDeviceBufferDeviceAddressFeatures const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceBufferDeviceAddressFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( bufferDeviceAddress == rhs.bufferDeviceAddress ) &&
|
|
( bufferDeviceAddressCaptureReplay == rhs.bufferDeviceAddressCaptureReplay ) &&
|
|
( bufferDeviceAddressMultiDevice == rhs.bufferDeviceAddressMultiDevice );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceBufferDeviceAddressFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBufferDeviceAddressFeatures;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceBufferDeviceAddressFeatures>
|
|
{
|
|
using Type = PhysicalDeviceBufferDeviceAddressFeatures;
|
|
};
|
|
|
|
using PhysicalDeviceBufferDeviceAddressFeaturesKHR = PhysicalDeviceBufferDeviceAddressFeatures;
|
|
|
|
  struct PhysicalDeviceColorWriteEnableFeaturesEXT
  {
    using NativeType = VkPhysicalDeviceColorWriteEnableFeaturesEXT;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceColorWriteEnableFeaturesEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceColorWriteEnableFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 colorWriteEnable_ = {},
                                                                    void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , colorWriteEnable( colorWriteEnable_ )
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceColorWriteEnableFeaturesEXT( PhysicalDeviceColorWriteEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceColorWriteEnableFeaturesEXT( VkPhysicalDeviceColorWriteEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceColorWriteEnableFeaturesEXT( *reinterpret_cast<PhysicalDeviceColorWriteEnableFeaturesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceColorWriteEnableFeaturesEXT & operator=( PhysicalDeviceColorWriteEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceColorWriteEnableFeaturesEXT & operator=( VkPhysicalDeviceColorWriteEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceColorWriteEnableFeaturesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceColorWriteEnableFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceColorWriteEnableFeaturesEXT &
      setColorWriteEnable( VULKAN_HPP_NAMESPACE::Bool32 colorWriteEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      colorWriteEnable = colorWriteEnable_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkPhysicalDeviceColorWriteEnableFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceColorWriteEnableFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceColorWriteEnableFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceColorWriteEnableFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, colorWriteEnable );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceColorWriteEnableFeaturesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceColorWriteEnableFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
# if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
# else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( colorWriteEnable == rhs.colorWriteEnable );
# endif
    }

    bool operator!=( PhysicalDeviceColorWriteEnableFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceColorWriteEnableFeaturesEXT;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 colorWriteEnable = {};
  };

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceColorWriteEnableFeaturesEXT>
  {
    using Type = PhysicalDeviceColorWriteEnableFeaturesEXT;
  };
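
  // Illustrative usage sketch (not part of the generated API): the generated setters return a
  // reference to the structure, so fields can be filled by method chaining. Enabling this feature
  // is typically paired with a PipelineColorWriteCreateInfoEXT chained into the pipeline's color
  // blend state at pipeline-creation time, which is outside the scope of this sketch.
  //
  //   auto colorWriteFeatures = VULKAN_HPP_NAMESPACE::PhysicalDeviceColorWriteEnableFeaturesEXT{}
  //                               .setColorWriteEnable( VK_TRUE );
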
struct PhysicalDeviceConservativeRasterizationPropertiesEXT
|
|
{
|
|
using NativeType = VkPhysicalDeviceConservativeRasterizationPropertiesEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceConservativeRasterizationPropertiesEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceConservativeRasterizationPropertiesEXT( float primitiveOverestimationSize_ = {},
|
|
float maxExtraPrimitiveOverestimationSize_ = {},
|
|
float extraPrimitiveOverestimationSizeGranularity_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 primitiveUnderestimation_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 conservativePointAndLineRasterization_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 degenerateTrianglesRasterized_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 degenerateLinesRasterized_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 fullyCoveredFragmentShaderInputVariable_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 conservativeRasterizationPostDepthCoverage_ = {},
|
|
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, primitiveOverestimationSize( primitiveOverestimationSize_ )
|
|
, maxExtraPrimitiveOverestimationSize( maxExtraPrimitiveOverestimationSize_ )
|
|
, extraPrimitiveOverestimationSizeGranularity( extraPrimitiveOverestimationSizeGranularity_ )
|
|
, primitiveUnderestimation( primitiveUnderestimation_ )
|
|
, conservativePointAndLineRasterization( conservativePointAndLineRasterization_ )
|
|
, degenerateTrianglesRasterized( degenerateTrianglesRasterized_ )
|
|
, degenerateLinesRasterized( degenerateLinesRasterized_ )
|
|
, fullyCoveredFragmentShaderInputVariable( fullyCoveredFragmentShaderInputVariable_ )
|
|
, conservativeRasterizationPostDepthCoverage( conservativeRasterizationPostDepthCoverage_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR
|
|
PhysicalDeviceConservativeRasterizationPropertiesEXT( PhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceConservativeRasterizationPropertiesEXT( VkPhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceConservativeRasterizationPropertiesEXT( *reinterpret_cast<PhysicalDeviceConservativeRasterizationPropertiesEXT const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
PhysicalDeviceConservativeRasterizationPropertiesEXT &
|
|
operator=( PhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceConservativeRasterizationPropertiesEXT & operator=( VkPhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceConservativeRasterizationPropertiesEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
operator VkPhysicalDeviceConservativeRasterizationPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceConservativeRasterizationPropertiesEXT *>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceConservativeRasterizationPropertiesEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceConservativeRasterizationPropertiesEXT *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
void * const &,
|
|
float const &,
|
|
float const &,
|
|
float const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType,
|
|
pNext,
|
|
primitiveOverestimationSize,
|
|
maxExtraPrimitiveOverestimationSize,
|
|
extraPrimitiveOverestimationSizeGranularity,
|
|
primitiveUnderestimation,
|
|
conservativePointAndLineRasterization,
|
|
degenerateTrianglesRasterized,
|
|
degenerateLinesRasterized,
|
|
fullyCoveredFragmentShaderInputVariable,
|
|
conservativeRasterizationPostDepthCoverage );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( PhysicalDeviceConservativeRasterizationPropertiesEXT const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( primitiveOverestimationSize == rhs.primitiveOverestimationSize ) &&
|
|
( maxExtraPrimitiveOverestimationSize == rhs.maxExtraPrimitiveOverestimationSize ) &&
|
|
( extraPrimitiveOverestimationSizeGranularity == rhs.extraPrimitiveOverestimationSizeGranularity ) &&
|
|
( primitiveUnderestimation == rhs.primitiveUnderestimation ) &&
|
|
( conservativePointAndLineRasterization == rhs.conservativePointAndLineRasterization ) &&
|
|
( degenerateTrianglesRasterized == rhs.degenerateTrianglesRasterized ) && ( degenerateLinesRasterized == rhs.degenerateLinesRasterized ) &&
|
|
( fullyCoveredFragmentShaderInputVariable == rhs.fullyCoveredFragmentShaderInputVariable ) &&
|
|
( conservativeRasterizationPostDepthCoverage == rhs.conservativeRasterizationPostDepthCoverage );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceConservativeRasterizationPropertiesEXT;
|
|
void * pNext = {};
|
|
float primitiveOverestimationSize = {};
|
|
float maxExtraPrimitiveOverestimationSize = {};
|
|
float extraPrimitiveOverestimationSizeGranularity = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 primitiveUnderestimation = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 conservativePointAndLineRasterization = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 degenerateTrianglesRasterized = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 degenerateLinesRasterized = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 fullyCoveredFragmentShaderInputVariable = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 conservativeRasterizationPostDepthCoverage = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceConservativeRasterizationPropertiesEXT>
|
|
{
|
|
using Type = PhysicalDeviceConservativeRasterizationPropertiesEXT;
|
|
};
|
|
|
|
struct PhysicalDeviceCustomBorderColorFeaturesEXT
|
|
{
|
|
using NativeType = VkPhysicalDeviceCustomBorderColorFeaturesEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCustomBorderColorFeaturesEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceCustomBorderColorFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 customBorderColors_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 customBorderColorWithoutFormat_ = {},
|
|
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, customBorderColors( customBorderColors_ )
|
|
, customBorderColorWithoutFormat( customBorderColorWithoutFormat_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceCustomBorderColorFeaturesEXT( PhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceCustomBorderColorFeaturesEXT( VkPhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceCustomBorderColorFeaturesEXT( *reinterpret_cast<PhysicalDeviceCustomBorderColorFeaturesEXT const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
PhysicalDeviceCustomBorderColorFeaturesEXT & operator=( PhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceCustomBorderColorFeaturesEXT & operator=( VkPhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCustomBorderColorFeaturesEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCustomBorderColorFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCustomBorderColorFeaturesEXT &
|
|
setCustomBorderColors( VULKAN_HPP_NAMESPACE::Bool32 customBorderColors_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
customBorderColors = customBorderColors_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCustomBorderColorFeaturesEXT &
|
|
setCustomBorderColorWithoutFormat( VULKAN_HPP_NAMESPACE::Bool32 customBorderColorWithoutFormat_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
customBorderColorWithoutFormat = customBorderColorWithoutFormat_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkPhysicalDeviceCustomBorderColorFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceCustomBorderColorFeaturesEXT *>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceCustomBorderColorFeaturesEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceCustomBorderColorFeaturesEXT *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, customBorderColors, customBorderColorWithoutFormat );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( PhysicalDeviceCustomBorderColorFeaturesEXT const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( customBorderColors == rhs.customBorderColors ) &&
|
|
( customBorderColorWithoutFormat == rhs.customBorderColorWithoutFormat );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCustomBorderColorFeaturesEXT;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 customBorderColors = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 customBorderColorWithoutFormat = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceCustomBorderColorFeaturesEXT>
|
|
{
|
|
using Type = PhysicalDeviceCustomBorderColorFeaturesEXT;
|
|
};
|
|
|
|
  struct PhysicalDeviceCustomBorderColorPropertiesEXT
  {
    using NativeType = VkPhysicalDeviceCustomBorderColorPropertiesEXT;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCustomBorderColorPropertiesEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceCustomBorderColorPropertiesEXT( uint32_t maxCustomBorderColorSamplers_ = {},
                                                                       void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , maxCustomBorderColorSamplers( maxCustomBorderColorSamplers_ )
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceCustomBorderColorPropertiesEXT( PhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceCustomBorderColorPropertiesEXT( VkPhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceCustomBorderColorPropertiesEXT( *reinterpret_cast<PhysicalDeviceCustomBorderColorPropertiesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceCustomBorderColorPropertiesEXT & operator=( PhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceCustomBorderColorPropertiesEXT & operator=( VkPhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCustomBorderColorPropertiesEXT const *>( &rhs );
      return *this;
    }

    operator VkPhysicalDeviceCustomBorderColorPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceCustomBorderColorPropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceCustomBorderColorPropertiesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceCustomBorderColorPropertiesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, maxCustomBorderColorSamplers );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceCustomBorderColorPropertiesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
# if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
# else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxCustomBorderColorSamplers == rhs.maxCustomBorderColorSamplers );
# endif
    }

    bool operator!=( PhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCustomBorderColorPropertiesEXT;
    void * pNext = {};
    uint32_t maxCustomBorderColorSamplers = {};
  };

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceCustomBorderColorPropertiesEXT>
  {
    using Type = PhysicalDeviceCustomBorderColorPropertiesEXT;
  };
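
  // Illustrative usage sketch (not part of the generated API): properties structures such as
  // PhysicalDeviceCustomBorderColorPropertiesEXT are typically read by chaining them into
  // PhysicalDeviceProperties2 via pNext; the implementation fills the chained structure.
  // The name physicalDevice below is assumed to exist in the calling code.
  //
  //   VULKAN_HPP_NAMESPACE::PhysicalDeviceCustomBorderColorPropertiesEXT customBorderColorProps{};
  //   VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 properties2{};
  //   properties2.pNext = &customBorderColorProps;
  //   physicalDevice.getProperties2( &properties2 );
  //   uint32_t maxSamplers = customBorderColorProps.maxCustomBorderColorSamplers;
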
  struct PhysicalDeviceDepthClipEnableFeaturesEXT
  {
    using NativeType = VkPhysicalDeviceDepthClipEnableFeaturesEXT;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDepthClipEnableFeaturesEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClipEnableFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ = {},
                                                                   void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , depthClipEnable( depthClipEnable_ )
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClipEnableFeaturesEXT( PhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceDepthClipEnableFeaturesEXT( VkPhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceDepthClipEnableFeaturesEXT( *reinterpret_cast<PhysicalDeviceDepthClipEnableFeaturesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceDepthClipEnableFeaturesEXT & operator=( PhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceDepthClipEnableFeaturesEXT & operator=( VkPhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipEnableFeaturesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthClipEnableFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthClipEnableFeaturesEXT & setDepthClipEnable( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      depthClipEnable = depthClipEnable_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkPhysicalDeviceDepthClipEnableFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceDepthClipEnableFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceDepthClipEnableFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceDepthClipEnableFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, depthClipEnable );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceDepthClipEnableFeaturesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
# if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
# else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( depthClipEnable == rhs.depthClipEnable );
# endif
    }

    bool operator!=( PhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDepthClipEnableFeaturesEXT;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable = {};
  };

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceDepthClipEnableFeaturesEXT>
  {
    using Type = PhysicalDeviceDepthClipEnableFeaturesEXT;
  };
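
  // Illustrative usage sketch (not part of the generated API): every struct in this header is
  // layout-compatible with its NativeType and converts implicitly, so a filled C++ wrapper can be
  // passed directly where the C API expects the corresponding Vk struct.
  //
  //   VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipEnableFeaturesEXT depthClipFeatures{};
  //   VkPhysicalDeviceDepthClipEnableFeaturesEXT const & native = depthClipFeatures;  // via operator VkPhysicalDeviceDepthClipEnableFeaturesEXT const &()
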
  struct PhysicalDeviceDepthStencilResolveProperties
  {
    using NativeType = VkPhysicalDeviceDepthStencilResolveProperties;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDepthStencilResolveProperties;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthStencilResolveProperties( VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes_ = {},
                                                                      VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes_ = {},
                                                                      VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone_ = {},
                                                                      VULKAN_HPP_NAMESPACE::Bool32 independentResolve_ = {},
                                                                      void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , supportedDepthResolveModes( supportedDepthResolveModes_ )
      , supportedStencilResolveModes( supportedStencilResolveModes_ )
      , independentResolveNone( independentResolveNone_ )
      , independentResolve( independentResolve_ )
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthStencilResolveProperties( PhysicalDeviceDepthStencilResolveProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceDepthStencilResolveProperties( VkPhysicalDeviceDepthStencilResolveProperties const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceDepthStencilResolveProperties( *reinterpret_cast<PhysicalDeviceDepthStencilResolveProperties const *>( &rhs ) )
    {
    }

    PhysicalDeviceDepthStencilResolveProperties & operator=( PhysicalDeviceDepthStencilResolveProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceDepthStencilResolveProperties & operator=( VkPhysicalDeviceDepthStencilResolveProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthStencilResolveProperties const *>( &rhs );
      return *this;
    }

    operator VkPhysicalDeviceDepthStencilResolveProperties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceDepthStencilResolveProperties *>( this );
    }

    operator VkPhysicalDeviceDepthStencilResolveProperties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceDepthStencilResolveProperties *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
               void * const &,
               VULKAN_HPP_NAMESPACE::ResolveModeFlags const &,
               VULKAN_HPP_NAMESPACE::ResolveModeFlags const &,
               VULKAN_HPP_NAMESPACE::Bool32 const &,
               VULKAN_HPP_NAMESPACE::Bool32 const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, supportedDepthResolveModes, supportedStencilResolveModes, independentResolveNone, independentResolve );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceDepthStencilResolveProperties const & ) const = default;
#else
    bool operator==( PhysicalDeviceDepthStencilResolveProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
# if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
# else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( supportedDepthResolveModes == rhs.supportedDepthResolveModes ) &&
             ( supportedStencilResolveModes == rhs.supportedStencilResolveModes ) && ( independentResolveNone == rhs.independentResolveNone ) &&
             ( independentResolve == rhs.independentResolve );
# endif
    }

    bool operator!=( PhysicalDeviceDepthStencilResolveProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDepthStencilResolveProperties;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes = {};
    VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes = {};
    VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone = {};
    VULKAN_HPP_NAMESPACE::Bool32 independentResolve = {};
  };

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceDepthStencilResolveProperties>
  {
    using Type = PhysicalDeviceDepthStencilResolveProperties;
  };

  using PhysicalDeviceDepthStencilResolvePropertiesKHR = PhysicalDeviceDepthStencilResolveProperties;
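
  // Illustrative usage sketch (not part of the generated API): supportedDepthResolveModes and
  // supportedStencilResolveModes are bitmasks, so individual resolve modes can be tested with the
  // flag operators generated for ResolveModeFlags. The name depthStencilResolveProps below is
  // assumed to have been filled through a PhysicalDeviceProperties2 pNext chain, as sketched above
  // for PhysicalDeviceCustomBorderColorPropertiesEXT.
  //
  //   bool canAverageDepth = static_cast<bool>( depthStencilResolveProps.supportedDepthResolveModes &
  //                                             VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eAverage );
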
struct PhysicalDeviceDescriptorIndexingFeatures
|
|
{
|
|
using NativeType = VkPhysicalDeviceDescriptorIndexingFeatures;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDescriptorIndexingFeatures;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingFeatures( VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_ = {},
|
|
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, shaderInputAttachmentArrayDynamicIndexing( shaderInputAttachmentArrayDynamicIndexing_ )
|
|
, shaderUniformTexelBufferArrayDynamicIndexing( shaderUniformTexelBufferArrayDynamicIndexing_ )
|
|
, shaderStorageTexelBufferArrayDynamicIndexing( shaderStorageTexelBufferArrayDynamicIndexing_ )
|
|
, shaderUniformBufferArrayNonUniformIndexing( shaderUniformBufferArrayNonUniformIndexing_ )
|
|
, shaderSampledImageArrayNonUniformIndexing( shaderSampledImageArrayNonUniformIndexing_ )
|
|
, shaderStorageBufferArrayNonUniformIndexing( shaderStorageBufferArrayNonUniformIndexing_ )
|
|
, shaderStorageImageArrayNonUniformIndexing( shaderStorageImageArrayNonUniformIndexing_ )
|
|
, shaderInputAttachmentArrayNonUniformIndexing( shaderInputAttachmentArrayNonUniformIndexing_ )
|
|
, shaderUniformTexelBufferArrayNonUniformIndexing( shaderUniformTexelBufferArrayNonUniformIndexing_ )
|
|
, shaderStorageTexelBufferArrayNonUniformIndexing( shaderStorageTexelBufferArrayNonUniformIndexing_ )
|
|
, descriptorBindingUniformBufferUpdateAfterBind( descriptorBindingUniformBufferUpdateAfterBind_ )
|
|
, descriptorBindingSampledImageUpdateAfterBind( descriptorBindingSampledImageUpdateAfterBind_ )
|
|
, descriptorBindingStorageImageUpdateAfterBind( descriptorBindingStorageImageUpdateAfterBind_ )
|
|
, descriptorBindingStorageBufferUpdateAfterBind( descriptorBindingStorageBufferUpdateAfterBind_ )
|
|
, descriptorBindingUniformTexelBufferUpdateAfterBind( descriptorBindingUniformTexelBufferUpdateAfterBind_ )
|
|
, descriptorBindingStorageTexelBufferUpdateAfterBind( descriptorBindingStorageTexelBufferUpdateAfterBind_ )
|
|
, descriptorBindingUpdateUnusedWhilePending( descriptorBindingUpdateUnusedWhilePending_ )
|
|
, descriptorBindingPartiallyBound( descriptorBindingPartiallyBound_ )
|
|
, descriptorBindingVariableDescriptorCount( descriptorBindingVariableDescriptorCount_ )
|
|
, runtimeDescriptorArray( runtimeDescriptorArray_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingFeatures( PhysicalDeviceDescriptorIndexingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceDescriptorIndexingFeatures( VkPhysicalDeviceDescriptorIndexingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceDescriptorIndexingFeatures( *reinterpret_cast<PhysicalDeviceDescriptorIndexingFeatures const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
PhysicalDeviceDescriptorIndexingFeatures & operator=( PhysicalDeviceDescriptorIndexingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceDescriptorIndexingFeatures & operator=( VkPhysicalDeviceDescriptorIndexingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingFeatures const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures &
|
|
setShaderInputAttachmentArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shaderInputAttachmentArrayDynamicIndexing = shaderInputAttachmentArrayDynamicIndexing_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures &
|
|
setShaderUniformTexelBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shaderUniformTexelBufferArrayDynamicIndexing = shaderUniformTexelBufferArrayDynamicIndexing_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures &
|
|
setShaderStorageTexelBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shaderStorageTexelBufferArrayDynamicIndexing = shaderStorageTexelBufferArrayDynamicIndexing_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures &
|
|
setShaderUniformBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shaderUniformBufferArrayNonUniformIndexing = shaderUniformBufferArrayNonUniformIndexing_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures &
|
|
setShaderSampledImageArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shaderSampledImageArrayNonUniformIndexing = shaderSampledImageArrayNonUniformIndexing_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures &
|
|
setShaderStorageBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shaderStorageBufferArrayNonUniformIndexing = shaderStorageBufferArrayNonUniformIndexing_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures &
|
|
setShaderStorageImageArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shaderStorageImageArrayNonUniformIndexing = shaderStorageImageArrayNonUniformIndexing_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures &
|
|
setShaderInputAttachmentArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shaderInputAttachmentArrayNonUniformIndexing = shaderInputAttachmentArrayNonUniformIndexing_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures &
|
|
setShaderUniformTexelBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shaderUniformTexelBufferArrayNonUniformIndexing = shaderUniformTexelBufferArrayNonUniformIndexing_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures &
|
|
setShaderStorageTexelBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shaderStorageTexelBufferArrayNonUniformIndexing = shaderStorageTexelBufferArrayNonUniformIndexing_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures &
|
|
setDescriptorBindingUniformBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
descriptorBindingUniformBufferUpdateAfterBind = descriptorBindingUniformBufferUpdateAfterBind_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures &
|
|
setDescriptorBindingSampledImageUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
descriptorBindingSampledImageUpdateAfterBind = descriptorBindingSampledImageUpdateAfterBind_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures &
|
|
setDescriptorBindingStorageImageUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
descriptorBindingStorageImageUpdateAfterBind = descriptorBindingStorageImageUpdateAfterBind_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures &
|
|
setDescriptorBindingStorageBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
descriptorBindingStorageBufferUpdateAfterBind = descriptorBindingStorageBufferUpdateAfterBind_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingUniformTexelBufferUpdateAfterBind(
|
|
VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
descriptorBindingUniformTexelBufferUpdateAfterBind = descriptorBindingUniformTexelBufferUpdateAfterBind_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingStorageTexelBufferUpdateAfterBind(
|
|
VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
descriptorBindingStorageTexelBufferUpdateAfterBind = descriptorBindingStorageTexelBufferUpdateAfterBind_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures &
|
|
setDescriptorBindingUpdateUnusedWhilePending( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
descriptorBindingUpdateUnusedWhilePending = descriptorBindingUpdateUnusedWhilePending_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures &
|
|
setDescriptorBindingPartiallyBound( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
descriptorBindingPartiallyBound = descriptorBindingPartiallyBound_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures &
|
|
setDescriptorBindingVariableDescriptorCount( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
descriptorBindingVariableDescriptorCount = descriptorBindingVariableDescriptorCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures &
|
|
setRuntimeDescriptorArray( VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
runtimeDescriptorArray = runtimeDescriptorArray_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkPhysicalDeviceDescriptorIndexingFeatures const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceDescriptorIndexingFeatures *>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceDescriptorIndexingFeatures &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceDescriptorIndexingFeatures *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
void * const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType,
|
|
pNext,
|
|
shaderInputAttachmentArrayDynamicIndexing,
|
|
shaderUniformTexelBufferArrayDynamicIndexing,
|
|
shaderStorageTexelBufferArrayDynamicIndexing,
|
|
shaderUniformBufferArrayNonUniformIndexing,
|
|
shaderSampledImageArrayNonUniformIndexing,
|
|
shaderStorageBufferArrayNonUniformIndexing,
|
|
shaderStorageImageArrayNonUniformIndexing,
|
|
shaderInputAttachmentArrayNonUniformIndexing,
|
|
shaderUniformTexelBufferArrayNonUniformIndexing,
|
|
shaderStorageTexelBufferArrayNonUniformIndexing,
|
|
descriptorBindingUniformBufferUpdateAfterBind,
|
|
descriptorBindingSampledImageUpdateAfterBind,
|
|
descriptorBindingStorageImageUpdateAfterBind,
|
|
descriptorBindingStorageBufferUpdateAfterBind,
|
|
descriptorBindingUniformTexelBufferUpdateAfterBind,
|
|
descriptorBindingStorageTexelBufferUpdateAfterBind,
|
|
descriptorBindingUpdateUnusedWhilePending,
|
|
descriptorBindingPartiallyBound,
|
|
descriptorBindingVariableDescriptorCount,
|
|
runtimeDescriptorArray );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( PhysicalDeviceDescriptorIndexingFeatures const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceDescriptorIndexingFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
|
|
( shaderInputAttachmentArrayDynamicIndexing == rhs.shaderInputAttachmentArrayDynamicIndexing ) &&
|
|
( shaderUniformTexelBufferArrayDynamicIndexing == rhs.shaderUniformTexelBufferArrayDynamicIndexing ) &&
|
|
( shaderStorageTexelBufferArrayDynamicIndexing == rhs.shaderStorageTexelBufferArrayDynamicIndexing ) &&
|
|
( shaderUniformBufferArrayNonUniformIndexing == rhs.shaderUniformBufferArrayNonUniformIndexing ) &&
|
|
( shaderSampledImageArrayNonUniformIndexing == rhs.shaderSampledImageArrayNonUniformIndexing ) &&
|
|
( shaderStorageBufferArrayNonUniformIndexing == rhs.shaderStorageBufferArrayNonUniformIndexing ) &&
|
|
( shaderStorageImageArrayNonUniformIndexing == rhs.shaderStorageImageArrayNonUniformIndexing ) &&
|
|
( shaderInputAttachmentArrayNonUniformIndexing == rhs.shaderInputAttachmentArrayNonUniformIndexing ) &&
|
|
( shaderUniformTexelBufferArrayNonUniformIndexing == rhs.shaderUniformTexelBufferArrayNonUniformIndexing ) &&
|
|
( shaderStorageTexelBufferArrayNonUniformIndexing == rhs.shaderStorageTexelBufferArrayNonUniformIndexing ) &&
|
|
( descriptorBindingUniformBufferUpdateAfterBind == rhs.descriptorBindingUniformBufferUpdateAfterBind ) &&
|
|
( descriptorBindingSampledImageUpdateAfterBind == rhs.descriptorBindingSampledImageUpdateAfterBind ) &&
|
|
( descriptorBindingStorageImageUpdateAfterBind == rhs.descriptorBindingStorageImageUpdateAfterBind ) &&
|
|
( descriptorBindingStorageBufferUpdateAfterBind == rhs.descriptorBindingStorageBufferUpdateAfterBind ) &&
|
|
( descriptorBindingUniformTexelBufferUpdateAfterBind == rhs.descriptorBindingUniformTexelBufferUpdateAfterBind ) &&
|
|
( descriptorBindingStorageTexelBufferUpdateAfterBind == rhs.descriptorBindingStorageTexelBufferUpdateAfterBind ) &&
|
|
( descriptorBindingUpdateUnusedWhilePending == rhs.descriptorBindingUpdateUnusedWhilePending ) &&
|
|
( descriptorBindingPartiallyBound == rhs.descriptorBindingPartiallyBound ) &&
|
|
( descriptorBindingVariableDescriptorCount == rhs.descriptorBindingVariableDescriptorCount ) &&
|
|
( runtimeDescriptorArray == rhs.runtimeDescriptorArray );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceDescriptorIndexingFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDescriptorIndexingFeatures;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceDescriptorIndexingFeatures>
|
|
{
|
|
using Type = PhysicalDeviceDescriptorIndexingFeatures;
|
|
};
|
|
|
|
using PhysicalDeviceDescriptorIndexingFeaturesEXT = PhysicalDeviceDescriptorIndexingFeatures;
|
|
|
|
  struct PhysicalDeviceDescriptorIndexingProperties
  {
    using NativeType = VkPhysicalDeviceDescriptorIndexingProperties;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDescriptorIndexingProperties;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingProperties( uint32_t maxUpdateAfterBindDescriptorsInAllPools_ = {},
                                                                     VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative_ = {},
                                                                     VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative_ = {},
                                                                     VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative_ = {},
                                                                     VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative_ = {},
                                                                     VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative_ = {},
                                                                     VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind_ = {},
                                                                     VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod_ = {},
                                                                     uint32_t maxPerStageDescriptorUpdateAfterBindSamplers_ = {},
                                                                     uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers_ = {},
                                                                     uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers_ = {},
                                                                     uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages_ = {},
                                                                     uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages_ = {},
                                                                     uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments_ = {},
                                                                     uint32_t maxPerStageUpdateAfterBindResources_ = {},
                                                                     uint32_t maxDescriptorSetUpdateAfterBindSamplers_ = {},
                                                                     uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers_ = {},
                                                                     uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ = {},
                                                                     uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers_ = {},
                                                                     uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ = {},
                                                                     uint32_t maxDescriptorSetUpdateAfterBindSampledImages_ = {},
                                                                     uint32_t maxDescriptorSetUpdateAfterBindStorageImages_ = {},
                                                                     uint32_t maxDescriptorSetUpdateAfterBindInputAttachments_ = {},
                                                                     void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , maxUpdateAfterBindDescriptorsInAllPools( maxUpdateAfterBindDescriptorsInAllPools_ )
      , shaderUniformBufferArrayNonUniformIndexingNative( shaderUniformBufferArrayNonUniformIndexingNative_ )
      , shaderSampledImageArrayNonUniformIndexingNative( shaderSampledImageArrayNonUniformIndexingNative_ )
      , shaderStorageBufferArrayNonUniformIndexingNative( shaderStorageBufferArrayNonUniformIndexingNative_ )
      , shaderStorageImageArrayNonUniformIndexingNative( shaderStorageImageArrayNonUniformIndexingNative_ )
      , shaderInputAttachmentArrayNonUniformIndexingNative( shaderInputAttachmentArrayNonUniformIndexingNative_ )
      , robustBufferAccessUpdateAfterBind( robustBufferAccessUpdateAfterBind_ )
      , quadDivergentImplicitLod( quadDivergentImplicitLod_ )
      , maxPerStageDescriptorUpdateAfterBindSamplers( maxPerStageDescriptorUpdateAfterBindSamplers_ )
      , maxPerStageDescriptorUpdateAfterBindUniformBuffers( maxPerStageDescriptorUpdateAfterBindUniformBuffers_ )
      , maxPerStageDescriptorUpdateAfterBindStorageBuffers( maxPerStageDescriptorUpdateAfterBindStorageBuffers_ )
      , maxPerStageDescriptorUpdateAfterBindSampledImages( maxPerStageDescriptorUpdateAfterBindSampledImages_ )
      , maxPerStageDescriptorUpdateAfterBindStorageImages( maxPerStageDescriptorUpdateAfterBindStorageImages_ )
      , maxPerStageDescriptorUpdateAfterBindInputAttachments( maxPerStageDescriptorUpdateAfterBindInputAttachments_ )
      , maxPerStageUpdateAfterBindResources( maxPerStageUpdateAfterBindResources_ )
      , maxDescriptorSetUpdateAfterBindSamplers( maxDescriptorSetUpdateAfterBindSamplers_ )
      , maxDescriptorSetUpdateAfterBindUniformBuffers( maxDescriptorSetUpdateAfterBindUniformBuffers_ )
      , maxDescriptorSetUpdateAfterBindUniformBuffersDynamic( maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ )
      , maxDescriptorSetUpdateAfterBindStorageBuffers( maxDescriptorSetUpdateAfterBindStorageBuffers_ )
      , maxDescriptorSetUpdateAfterBindStorageBuffersDynamic( maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ )
      , maxDescriptorSetUpdateAfterBindSampledImages( maxDescriptorSetUpdateAfterBindSampledImages_ )
      , maxDescriptorSetUpdateAfterBindStorageImages( maxDescriptorSetUpdateAfterBindStorageImages_ )
      , maxDescriptorSetUpdateAfterBindInputAttachments( maxDescriptorSetUpdateAfterBindInputAttachments_ )
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingProperties( PhysicalDeviceDescriptorIndexingProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceDescriptorIndexingProperties( VkPhysicalDeviceDescriptorIndexingProperties const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceDescriptorIndexingProperties( *reinterpret_cast<PhysicalDeviceDescriptorIndexingProperties const *>( &rhs ) )
    {
    }

    PhysicalDeviceDescriptorIndexingProperties & operator=( PhysicalDeviceDescriptorIndexingProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceDescriptorIndexingProperties & operator=( VkPhysicalDeviceDescriptorIndexingProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingProperties const *>( &rhs );
      return *this;
    }

    operator VkPhysicalDeviceDescriptorIndexingProperties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceDescriptorIndexingProperties *>( this );
    }

    operator VkPhysicalDeviceDescriptorIndexingProperties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceDescriptorIndexingProperties *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
               void * const &,
               uint32_t const &,
               VULKAN_HPP_NAMESPACE::Bool32 const &,
               VULKAN_HPP_NAMESPACE::Bool32 const &,
               VULKAN_HPP_NAMESPACE::Bool32 const &,
               VULKAN_HPP_NAMESPACE::Bool32 const &,
               VULKAN_HPP_NAMESPACE::Bool32 const &,
               VULKAN_HPP_NAMESPACE::Bool32 const &,
               VULKAN_HPP_NAMESPACE::Bool32 const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       maxUpdateAfterBindDescriptorsInAllPools,
                       shaderUniformBufferArrayNonUniformIndexingNative,
                       shaderSampledImageArrayNonUniformIndexingNative,
                       shaderStorageBufferArrayNonUniformIndexingNative,
                       shaderStorageImageArrayNonUniformIndexingNative,
                       shaderInputAttachmentArrayNonUniformIndexingNative,
                       robustBufferAccessUpdateAfterBind,
                       quadDivergentImplicitLod,
                       maxPerStageDescriptorUpdateAfterBindSamplers,
                       maxPerStageDescriptorUpdateAfterBindUniformBuffers,
                       maxPerStageDescriptorUpdateAfterBindStorageBuffers,
                       maxPerStageDescriptorUpdateAfterBindSampledImages,
                       maxPerStageDescriptorUpdateAfterBindStorageImages,
                       maxPerStageDescriptorUpdateAfterBindInputAttachments,
                       maxPerStageUpdateAfterBindResources,
                       maxDescriptorSetUpdateAfterBindSamplers,
                       maxDescriptorSetUpdateAfterBindUniformBuffers,
                       maxDescriptorSetUpdateAfterBindUniformBuffersDynamic,
                       maxDescriptorSetUpdateAfterBindStorageBuffers,
                       maxDescriptorSetUpdateAfterBindStorageBuffersDynamic,
                       maxDescriptorSetUpdateAfterBindSampledImages,
                       maxDescriptorSetUpdateAfterBindStorageImages,
                       maxDescriptorSetUpdateAfterBindInputAttachments );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceDescriptorIndexingProperties const & ) const = default;
#else
    bool operator==( PhysicalDeviceDescriptorIndexingProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
# if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
# else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxUpdateAfterBindDescriptorsInAllPools == rhs.maxUpdateAfterBindDescriptorsInAllPools ) &&
             ( shaderUniformBufferArrayNonUniformIndexingNative == rhs.shaderUniformBufferArrayNonUniformIndexingNative ) &&
             ( shaderSampledImageArrayNonUniformIndexingNative == rhs.shaderSampledImageArrayNonUniformIndexingNative ) &&
             ( shaderStorageBufferArrayNonUniformIndexingNative == rhs.shaderStorageBufferArrayNonUniformIndexingNative ) &&
             ( shaderStorageImageArrayNonUniformIndexingNative == rhs.shaderStorageImageArrayNonUniformIndexingNative ) &&
             ( shaderInputAttachmentArrayNonUniformIndexingNative == rhs.shaderInputAttachmentArrayNonUniformIndexingNative ) &&
             ( robustBufferAccessUpdateAfterBind == rhs.robustBufferAccessUpdateAfterBind ) && ( quadDivergentImplicitLod == rhs.quadDivergentImplicitLod ) &&
             ( maxPerStageDescriptorUpdateAfterBindSamplers == rhs.maxPerStageDescriptorUpdateAfterBindSamplers ) &&
             ( maxPerStageDescriptorUpdateAfterBindUniformBuffers == rhs.maxPerStageDescriptorUpdateAfterBindUniformBuffers ) &&
             ( maxPerStageDescriptorUpdateAfterBindStorageBuffers == rhs.maxPerStageDescriptorUpdateAfterBindStorageBuffers ) &&
             ( maxPerStageDescriptorUpdateAfterBindSampledImages == rhs.maxPerStageDescriptorUpdateAfterBindSampledImages ) &&
             ( maxPerStageDescriptorUpdateAfterBindStorageImages == rhs.maxPerStageDescriptorUpdateAfterBindStorageImages ) &&
             ( maxPerStageDescriptorUpdateAfterBindInputAttachments == rhs.maxPerStageDescriptorUpdateAfterBindInputAttachments ) &&
             ( maxPerStageUpdateAfterBindResources == rhs.maxPerStageUpdateAfterBindResources ) &&
             ( maxDescriptorSetUpdateAfterBindSamplers == rhs.maxDescriptorSetUpdateAfterBindSamplers ) &&
             ( maxDescriptorSetUpdateAfterBindUniformBuffers == rhs.maxDescriptorSetUpdateAfterBindUniformBuffers ) &&
             ( maxDescriptorSetUpdateAfterBindUniformBuffersDynamic == rhs.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic ) &&
             ( maxDescriptorSetUpdateAfterBindStorageBuffers == rhs.maxDescriptorSetUpdateAfterBindStorageBuffers ) &&
             ( maxDescriptorSetUpdateAfterBindStorageBuffersDynamic == rhs.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic ) &&
             ( maxDescriptorSetUpdateAfterBindSampledImages == rhs.maxDescriptorSetUpdateAfterBindSampledImages ) &&
             ( maxDescriptorSetUpdateAfterBindStorageImages == rhs.maxDescriptorSetUpdateAfterBindStorageImages ) &&
             ( maxDescriptorSetUpdateAfterBindInputAttachments == rhs.maxDescriptorSetUpdateAfterBindInputAttachments );
# endif
    }

    bool operator!=( PhysicalDeviceDescriptorIndexingProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDescriptorIndexingProperties;
    void * pNext = {};
    uint32_t maxUpdateAfterBindDescriptorsInAllPools = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative = {};
    VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind = {};
    VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod = {};
    uint32_t maxPerStageDescriptorUpdateAfterBindSamplers = {};
    uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers = {};
    uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers = {};
    uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages = {};
    uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages = {};
    uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments = {};
    uint32_t maxPerStageUpdateAfterBindResources = {};
    uint32_t maxDescriptorSetUpdateAfterBindSamplers = {};
    uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers = {};
    uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic = {};
    uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers = {};
    uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic = {};
    uint32_t maxDescriptorSetUpdateAfterBindSampledImages = {};
    uint32_t maxDescriptorSetUpdateAfterBindStorageImages = {};
    uint32_t maxDescriptorSetUpdateAfterBindInputAttachments = {};
  };

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceDescriptorIndexingProperties>
  {
    using Type = PhysicalDeviceDescriptorIndexingProperties;
  };

  using PhysicalDeviceDescriptorIndexingPropertiesEXT = PhysicalDeviceDescriptorIndexingProperties;
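  // Illustrative sketch only (not generated from the registry): the update-after-bind limits above
  // are usually read through a structure chain on vk::PhysicalDevice::getProperties2. The
  // 'physicalDevice' handle is an assumption for the example.
  //
  //   auto chain = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2,
  //                                              vk::PhysicalDeviceDescriptorIndexingProperties>();
  //   auto const & indexingProperties = chain.get<vk::PhysicalDeviceDescriptorIndexingProperties>();
  //   uint32_t maxResources = indexingProperties.maxPerStageUpdateAfterBindResources;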
  struct PhysicalDeviceDiscardRectanglePropertiesEXT
  {
    using NativeType = VkPhysicalDeviceDiscardRectanglePropertiesEXT;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDiscardRectanglePropertiesEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceDiscardRectanglePropertiesEXT( uint32_t maxDiscardRectangles_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , maxDiscardRectangles( maxDiscardRectangles_ )
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceDiscardRectanglePropertiesEXT( PhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceDiscardRectanglePropertiesEXT( VkPhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceDiscardRectanglePropertiesEXT( *reinterpret_cast<PhysicalDeviceDiscardRectanglePropertiesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceDiscardRectanglePropertiesEXT & operator=( PhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceDiscardRectanglePropertiesEXT & operator=( VkPhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDiscardRectanglePropertiesEXT const *>( &rhs );
      return *this;
    }

    operator VkPhysicalDeviceDiscardRectanglePropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceDiscardRectanglePropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceDiscardRectanglePropertiesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceDiscardRectanglePropertiesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, maxDiscardRectangles );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceDiscardRectanglePropertiesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
# if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
# else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxDiscardRectangles == rhs.maxDiscardRectangles );
# endif
    }

    bool operator!=( PhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDiscardRectanglePropertiesEXT;
    void * pNext = {};
    uint32_t maxDiscardRectangles = {};
  };

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceDiscardRectanglePropertiesEXT>
  {
    using Type = PhysicalDeviceDiscardRectanglePropertiesEXT;
  };
struct PhysicalDeviceDriverProperties
|
|
{
|
|
using NativeType = VkPhysicalDeviceDriverProperties;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDriverProperties;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDriverProperties( VULKAN_HPP_NAMESPACE::DriverId driverID_ = VULKAN_HPP_NAMESPACE::DriverId::eAmdProprietary,
|
|
std::array<char, VK_MAX_DRIVER_NAME_SIZE> const & driverName_ = {},
|
|
std::array<char, VK_MAX_DRIVER_INFO_SIZE> const & driverInfo_ = {},
|
|
VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion_ = {},
|
|
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, driverID( driverID_ )
|
|
, driverName( driverName_ )
|
|
, driverInfo( driverInfo_ )
|
|
, conformanceVersion( conformanceVersion_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDriverProperties( PhysicalDeviceDriverProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceDriverProperties( VkPhysicalDeviceDriverProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceDriverProperties( *reinterpret_cast<PhysicalDeviceDriverProperties const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
PhysicalDeviceDriverProperties( VULKAN_HPP_NAMESPACE::DriverId driverID_,
|
|
std::string const & driverName_,
|
|
std::string const & driverInfo_ = {},
|
|
VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion_ = {},
|
|
void * pNext_ = nullptr )
|
|
: pNext( pNext_ ), driverID( driverID_ ), conformanceVersion( conformanceVersion_ )
|
|
{
|
|
VULKAN_HPP_ASSERT( driverName_.size() < VK_MAX_DRIVER_NAME_SIZE );
|
|
# if defined( WIN32 )
|
|
strncpy_s( driverName, VK_MAX_DRIVER_NAME_SIZE, driverName_.data(), driverName_.size() );
|
|
# else
|
|
strncpy( driverName, driverName_.data(), std::min<size_t>( VK_MAX_DRIVER_NAME_SIZE, driverName_.size() ) );
|
|
# endif
|
|
|
|
VULKAN_HPP_ASSERT( driverInfo_.size() < VK_MAX_DRIVER_INFO_SIZE );
|
|
# if defined( WIN32 )
|
|
strncpy_s( driverInfo, VK_MAX_DRIVER_INFO_SIZE, driverInfo_.data(), driverInfo_.size() );
|
|
# else
|
|
strncpy( driverInfo, driverInfo_.data(), std::min<size_t>( VK_MAX_DRIVER_INFO_SIZE, driverInfo_.size() ) );
|
|
# endif
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
PhysicalDeviceDriverProperties & operator=( PhysicalDeviceDriverProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceDriverProperties & operator=( VkPhysicalDeviceDriverProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDriverProperties const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
operator VkPhysicalDeviceDriverProperties const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceDriverProperties *>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceDriverProperties &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceDriverProperties *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
void * const &,
|
|
VULKAN_HPP_NAMESPACE::DriverId const &,
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DRIVER_NAME_SIZE> const &,
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DRIVER_INFO_SIZE> const &,
|
|
VULKAN_HPP_NAMESPACE::ConformanceVersion const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, driverID, driverName, driverInfo, conformanceVersion );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
std::strong_ordering operator<=>( PhysicalDeviceDriverProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
|
|
return cmp;
|
|
if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
|
|
return cmp;
|
|
if ( auto cmp = driverID <=> rhs.driverID; cmp != 0 )
|
|
return cmp;
|
|
if ( auto cmp = strcmp( driverName, rhs.driverName ); cmp != 0 )
|
|
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
|
|
if ( auto cmp = strcmp( driverInfo, rhs.driverInfo ); cmp != 0 )
|
|
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
|
|
if ( auto cmp = conformanceVersion <=> rhs.conformanceVersion; cmp != 0 )
|
|
return cmp;
|
|
|
|
return std::strong_ordering::equivalent;
|
|
}
|
|
#endif
|
|
|
|
bool operator==( PhysicalDeviceDriverProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( driverID == rhs.driverID ) && ( strcmp( driverName, rhs.driverName ) == 0 ) &&
|
|
( strcmp( driverInfo, rhs.driverInfo ) == 0 ) && ( conformanceVersion == rhs.conformanceVersion );
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceDriverProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDriverProperties;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::DriverId driverID = VULKAN_HPP_NAMESPACE::DriverId::eAmdProprietary;
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DRIVER_NAME_SIZE> driverName = {};
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DRIVER_INFO_SIZE> driverInfo = {};
|
|
VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceDriverProperties>
|
|
{
|
|
using Type = PhysicalDeviceDriverProperties;
|
|
};
|
|
|
|
using PhysicalDeviceDriverPropertiesKHR = PhysicalDeviceDriverProperties;
|
|
|
|
struct PhysicalDeviceDynamicRenderingFeatures
|
|
{
|
|
using NativeType = VkPhysicalDeviceDynamicRenderingFeatures;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDynamicRenderingFeatures;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceDynamicRenderingFeatures( VULKAN_HPP_NAMESPACE::Bool32 dynamicRendering_ = {},
|
|
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, dynamicRendering( dynamicRendering_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceDynamicRenderingFeatures( PhysicalDeviceDynamicRenderingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceDynamicRenderingFeatures( VkPhysicalDeviceDynamicRenderingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceDynamicRenderingFeatures( *reinterpret_cast<PhysicalDeviceDynamicRenderingFeatures const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
PhysicalDeviceDynamicRenderingFeatures & operator=( PhysicalDeviceDynamicRenderingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceDynamicRenderingFeatures & operator=( VkPhysicalDeviceDynamicRenderingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDynamicRenderingFeatures const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDynamicRenderingFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDynamicRenderingFeatures & setDynamicRendering( VULKAN_HPP_NAMESPACE::Bool32 dynamicRendering_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dynamicRendering = dynamicRendering_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkPhysicalDeviceDynamicRenderingFeatures const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceDynamicRenderingFeatures *>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceDynamicRenderingFeatures &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceDynamicRenderingFeatures *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, dynamicRendering );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( PhysicalDeviceDynamicRenderingFeatures const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceDynamicRenderingFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dynamicRendering == rhs.dynamicRendering );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceDynamicRenderingFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDynamicRenderingFeatures;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 dynamicRendering = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceDynamicRenderingFeatures>
|
|
{
|
|
using Type = PhysicalDeviceDynamicRenderingFeatures;
|
|
};
|
|
|
|
using PhysicalDeviceDynamicRenderingFeaturesKHR = PhysicalDeviceDynamicRenderingFeatures;
|
|
|
|
struct PhysicalDeviceExtendedDynamicState2FeaturesEXT
|
|
{
|
|
using NativeType = VkPhysicalDeviceExtendedDynamicState2FeaturesEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExtendedDynamicState2FeaturesEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedDynamicState2FeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2LogicOp_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2PatchControlPoints_ = {},
|
|
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, extendedDynamicState2( extendedDynamicState2_ )
|
|
, extendedDynamicState2LogicOp( extendedDynamicState2LogicOp_ )
|
|
, extendedDynamicState2PatchControlPoints( extendedDynamicState2PatchControlPoints_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR
|
|
PhysicalDeviceExtendedDynamicState2FeaturesEXT( PhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceExtendedDynamicState2FeaturesEXT( VkPhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceExtendedDynamicState2FeaturesEXT( *reinterpret_cast<PhysicalDeviceExtendedDynamicState2FeaturesEXT const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
PhysicalDeviceExtendedDynamicState2FeaturesEXT & operator=( PhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceExtendedDynamicState2FeaturesEXT & operator=( VkPhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState2FeaturesEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState2FeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState2FeaturesEXT &
|
|
setExtendedDynamicState2( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
extendedDynamicState2 = extendedDynamicState2_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState2FeaturesEXT &
|
|
setExtendedDynamicState2LogicOp( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2LogicOp_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
extendedDynamicState2LogicOp = extendedDynamicState2LogicOp_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState2FeaturesEXT &
|
|
setExtendedDynamicState2PatchControlPoints( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2PatchControlPoints_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
extendedDynamicState2PatchControlPoints = extendedDynamicState2PatchControlPoints_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkPhysicalDeviceExtendedDynamicState2FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceExtendedDynamicState2FeaturesEXT *>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceExtendedDynamicState2FeaturesEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceExtendedDynamicState2FeaturesEXT *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
void * const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, extendedDynamicState2, extendedDynamicState2LogicOp, extendedDynamicState2PatchControlPoints );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( PhysicalDeviceExtendedDynamicState2FeaturesEXT const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( extendedDynamicState2 == rhs.extendedDynamicState2 ) &&
|
|
( extendedDynamicState2LogicOp == rhs.extendedDynamicState2LogicOp ) &&
|
|
( extendedDynamicState2PatchControlPoints == rhs.extendedDynamicState2PatchControlPoints );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExtendedDynamicState2FeaturesEXT;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2 = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2LogicOp = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2PatchControlPoints = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceExtendedDynamicState2FeaturesEXT>
|
|
{
|
|
using Type = PhysicalDeviceExtendedDynamicState2FeaturesEXT;
|
|
};
|
|
|
|
struct PhysicalDeviceExtendedDynamicStateFeaturesEXT
|
|
{
|
|
using NativeType = VkPhysicalDeviceExtendedDynamicStateFeaturesEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExtendedDynamicStateFeaturesEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedDynamicStateFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState_ = {},
|
|
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, extendedDynamicState( extendedDynamicState_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR
|
|
PhysicalDeviceExtendedDynamicStateFeaturesEXT( PhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceExtendedDynamicStateFeaturesEXT( VkPhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceExtendedDynamicStateFeaturesEXT( *reinterpret_cast<PhysicalDeviceExtendedDynamicStateFeaturesEXT const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
PhysicalDeviceExtendedDynamicStateFeaturesEXT & operator=( PhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceExtendedDynamicStateFeaturesEXT & operator=( VkPhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicStateFeaturesEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicStateFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicStateFeaturesEXT &
|
|
setExtendedDynamicState( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
extendedDynamicState = extendedDynamicState_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkPhysicalDeviceExtendedDynamicStateFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceExtendedDynamicStateFeaturesEXT *>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceExtendedDynamicStateFeaturesEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceExtendedDynamicStateFeaturesEXT *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, extendedDynamicState );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( PhysicalDeviceExtendedDynamicStateFeaturesEXT const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( extendedDynamicState == rhs.extendedDynamicState );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExtendedDynamicStateFeaturesEXT;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceExtendedDynamicStateFeaturesEXT>
|
|
{
|
|
using Type = PhysicalDeviceExtendedDynamicStateFeaturesEXT;
|
|
};
|
|
|
|
struct PhysicalDeviceExternalBufferInfo
|
|
{
|
|
using NativeType = VkPhysicalDeviceExternalBufferInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalBufferInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalBufferInfo(
|
|
VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_ = {},
|
|
VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ = {},
|
|
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd,
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, flags( flags_ )
|
|
, usage( usage_ )
|
|
, handleType( handleType_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalBufferInfo( PhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceExternalBufferInfo( VkPhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceExternalBufferInfo( *reinterpret_cast<PhysicalDeviceExternalBufferInfo const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
PhysicalDeviceExternalBufferInfo & operator=( PhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceExternalBufferInfo & operator=( VkPhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalBufferInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalBufferInfo & setFlags( VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalBufferInfo & setUsage( VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
usage = usage_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalBufferInfo &
|
|
setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
handleType = handleType_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkPhysicalDeviceExternalBufferInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceExternalBufferInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceExternalBufferInfo *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
const void * const &,
|
|
VULKAN_HPP_NAMESPACE::BufferCreateFlags const &,
|
|
VULKAN_HPP_NAMESPACE::BufferUsageFlags const &,
|
|
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags, usage, handleType );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( PhysicalDeviceExternalBufferInfo const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceExternalBufferInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( usage == rhs.usage ) && ( handleType == rhs.handleType );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceExternalBufferInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalBufferInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::BufferCreateFlags flags = {};
|
|
VULKAN_HPP_NAMESPACE::BufferUsageFlags usage = {};
|
|
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceExternalBufferInfo>
|
|
{
|
|
using Type = PhysicalDeviceExternalBufferInfo;
|
|
};
|
|
|
|
using PhysicalDeviceExternalBufferInfoKHR = PhysicalDeviceExternalBufferInfo;
|
|
|
|
struct PhysicalDeviceExternalFenceInfo
|
|
{
|
|
using NativeType = VkPhysicalDeviceExternalFenceInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalFenceInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalFenceInfo(
|
|
VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd,
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, handleType( handleType_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalFenceInfo( PhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceExternalFenceInfo( VkPhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceExternalFenceInfo( *reinterpret_cast<PhysicalDeviceExternalFenceInfo const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
PhysicalDeviceExternalFenceInfo & operator=( PhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceExternalFenceInfo & operator=( VkPhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalFenceInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalFenceInfo &
|
|
setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
handleType = handleType_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkPhysicalDeviceExternalFenceInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceExternalFenceInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceExternalFenceInfo *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, handleType );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( PhysicalDeviceExternalFenceInfo const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceExternalFenceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceExternalFenceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalFenceInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd;
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceExternalFenceInfo>
|
|
{
|
|
using Type = PhysicalDeviceExternalFenceInfo;
|
|
};
|
|
|
|
using PhysicalDeviceExternalFenceInfoKHR = PhysicalDeviceExternalFenceInfo;
|
|
|
|
struct PhysicalDeviceExternalImageFormatInfo
|
|
{
|
|
using NativeType = VkPhysicalDeviceExternalImageFormatInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalImageFormatInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalImageFormatInfo(
|
|
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd,
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, handleType( handleType_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalImageFormatInfo( PhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceExternalImageFormatInfo( VkPhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceExternalImageFormatInfo( *reinterpret_cast<PhysicalDeviceExternalImageFormatInfo const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
PhysicalDeviceExternalImageFormatInfo & operator=( PhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceExternalImageFormatInfo & operator=( VkPhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalImageFormatInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalImageFormatInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalImageFormatInfo &
|
|
setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
handleType = handleType_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkPhysicalDeviceExternalImageFormatInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceExternalImageFormatInfo *>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceExternalImageFormatInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceExternalImageFormatInfo *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, handleType );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( PhysicalDeviceExternalImageFormatInfo const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceExternalImageFormatInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceExternalImageFormatInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalImageFormatInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceExternalImageFormatInfo>
|
|
{
|
|
using Type = PhysicalDeviceExternalImageFormatInfo;
|
|
};
|
|
|
|
using PhysicalDeviceExternalImageFormatInfoKHR = PhysicalDeviceExternalImageFormatInfo;
|
|
|
|
struct PhysicalDeviceExternalMemoryHostPropertiesEXT
|
|
{
|
|
using NativeType = VkPhysicalDeviceExternalMemoryHostPropertiesEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalMemoryHostPropertiesEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalMemoryHostPropertiesEXT( VULKAN_HPP_NAMESPACE::DeviceSize minImportedHostPointerAlignment_ = {},
|
|
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, minImportedHostPointerAlignment( minImportedHostPointerAlignment_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR
|
|
PhysicalDeviceExternalMemoryHostPropertiesEXT( PhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceExternalMemoryHostPropertiesEXT( VkPhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceExternalMemoryHostPropertiesEXT( *reinterpret_cast<PhysicalDeviceExternalMemoryHostPropertiesEXT const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
PhysicalDeviceExternalMemoryHostPropertiesEXT & operator=( PhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceExternalMemoryHostPropertiesEXT & operator=( VkPhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryHostPropertiesEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
operator VkPhysicalDeviceExternalMemoryHostPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceExternalMemoryHostPropertiesEXT *>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceExternalMemoryHostPropertiesEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceExternalMemoryHostPropertiesEXT *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, minImportedHostPointerAlignment );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( PhysicalDeviceExternalMemoryHostPropertiesEXT const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minImportedHostPointerAlignment == rhs.minImportedHostPointerAlignment );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalMemoryHostPropertiesEXT;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize minImportedHostPointerAlignment = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceExternalMemoryHostPropertiesEXT>
|
|
{
|
|
using Type = PhysicalDeviceExternalMemoryHostPropertiesEXT;
|
|
};
|
|
|
|
#if defined( VK_USE_PLATFORM_SCI )
|
|
struct PhysicalDeviceExternalMemorySciBufFeaturesNV
|
|
{
|
|
using NativeType = VkPhysicalDeviceExternalMemorySciBufFeaturesNV;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalMemorySciBufFeaturesNV;
|
|
|
|
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalMemorySciBufFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 sciBufImport_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 sciBufExport_ = {},
|
|
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, sciBufImport( sciBufImport_ )
|
|
, sciBufExport( sciBufExport_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalMemorySciBufFeaturesNV( PhysicalDeviceExternalMemorySciBufFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceExternalMemorySciBufFeaturesNV( VkPhysicalDeviceExternalMemorySciBufFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceExternalMemorySciBufFeaturesNV( *reinterpret_cast<PhysicalDeviceExternalMemorySciBufFeaturesNV const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
PhysicalDeviceExternalMemorySciBufFeaturesNV & operator=( PhysicalDeviceExternalMemorySciBufFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceExternalMemorySciBufFeaturesNV & operator=( VkPhysicalDeviceExternalMemorySciBufFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemorySciBufFeaturesNV const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalMemorySciBufFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalMemorySciBufFeaturesNV & setSciBufImport( VULKAN_HPP_NAMESPACE::Bool32 sciBufImport_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
sciBufImport = sciBufImport_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalMemorySciBufFeaturesNV & setSciBufExport( VULKAN_HPP_NAMESPACE::Bool32 sciBufExport_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
sciBufExport = sciBufExport_;
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkPhysicalDeviceExternalMemorySciBufFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceExternalMemorySciBufFeaturesNV *>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceExternalMemorySciBufFeaturesNV &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceExternalMemorySciBufFeaturesNV *>( this );
|
|
}
|
|
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, sciBufImport, sciBufExport );
|
|
}
|
|
# endif
|
|
|
|
# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( PhysicalDeviceExternalMemorySciBufFeaturesNV const & ) const = default;
|
|
# else
|
|
bool operator==( PhysicalDeviceExternalMemorySciBufFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( sciBufImport == rhs.sciBufImport ) && ( sciBufExport == rhs.sciBufExport );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceExternalMemorySciBufFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
# endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalMemorySciBufFeaturesNV;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 sciBufImport = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 sciBufExport = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceExternalMemorySciBufFeaturesNV>
|
|
{
|
|
using Type = PhysicalDeviceExternalMemorySciBufFeaturesNV;
|
|
};
|
|
|
|
using PhysicalDeviceExternalSciBufFeaturesNV = PhysicalDeviceExternalMemorySciBufFeaturesNV;
|
|
#endif /*VK_USE_PLATFORM_SCI*/
|
|
|
|
#if defined( VK_USE_PLATFORM_SCREEN_QNX )
|
|
struct PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX
|
|
{
|
|
using NativeType = VkPhysicalDeviceExternalMemoryScreenBufferFeaturesQNX;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalMemoryScreenBufferFeaturesQNX;
|
|
|
|
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX( VULKAN_HPP_NAMESPACE::Bool32 screenBufferImport_ = {},
|
|
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, screenBufferImport( screenBufferImport_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR
|
|
PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX( PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX( VkPhysicalDeviceExternalMemoryScreenBufferFeaturesQNX const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX( *reinterpret_cast<PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX &
|
|
operator=( PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX & operator=( VkPhysicalDeviceExternalMemoryScreenBufferFeaturesQNX const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX &
|
|
setScreenBufferImport( VULKAN_HPP_NAMESPACE::Bool32 screenBufferImport_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
screenBufferImport = screenBufferImport_;
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkPhysicalDeviceExternalMemoryScreenBufferFeaturesQNX const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceExternalMemoryScreenBufferFeaturesQNX *>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceExternalMemoryScreenBufferFeaturesQNX &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceExternalMemoryScreenBufferFeaturesQNX *>( this );
|
|
}
|
|
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, screenBufferImport );
|
|
}
|
|
# endif
|
|
|
|
# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX const & ) const = default;
|
|
# else
|
|
bool operator==( PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( screenBufferImport == rhs.screenBufferImport );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
# endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalMemoryScreenBufferFeaturesQNX;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 screenBufferImport = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceExternalMemoryScreenBufferFeaturesQNX>
|
|
{
|
|
using Type = PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX;
|
|
};
|
|
#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
#if defined( VK_USE_PLATFORM_SCI )
|
|
struct PhysicalDeviceExternalSciSync2FeaturesNV
|
|
{
|
|
using NativeType = VkPhysicalDeviceExternalSciSync2FeaturesNV;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalSciSync2FeaturesNV;
|
|
|
|
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalSciSync2FeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 sciSyncFence_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 sciSyncSemaphore2_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 sciSyncImport_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 sciSyncExport_ = {},
|
|
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, sciSyncFence( sciSyncFence_ )
|
|
, sciSyncSemaphore2( sciSyncSemaphore2_ )
|
|
, sciSyncImport( sciSyncImport_ )
|
|
, sciSyncExport( sciSyncExport_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalSciSync2FeaturesNV( PhysicalDeviceExternalSciSync2FeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceExternalSciSync2FeaturesNV( VkPhysicalDeviceExternalSciSync2FeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceExternalSciSync2FeaturesNV( *reinterpret_cast<PhysicalDeviceExternalSciSync2FeaturesNV const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
PhysicalDeviceExternalSciSync2FeaturesNV & operator=( PhysicalDeviceExternalSciSync2FeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceExternalSciSync2FeaturesNV & operator=( VkPhysicalDeviceExternalSciSync2FeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSciSync2FeaturesNV const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalSciSync2FeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalSciSync2FeaturesNV & setSciSyncFence( VULKAN_HPP_NAMESPACE::Bool32 sciSyncFence_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
sciSyncFence = sciSyncFence_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalSciSync2FeaturesNV &
|
|
setSciSyncSemaphore2( VULKAN_HPP_NAMESPACE::Bool32 sciSyncSemaphore2_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
sciSyncSemaphore2 = sciSyncSemaphore2_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalSciSync2FeaturesNV & setSciSyncImport( VULKAN_HPP_NAMESPACE::Bool32 sciSyncImport_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
sciSyncImport = sciSyncImport_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalSciSync2FeaturesNV & setSciSyncExport( VULKAN_HPP_NAMESPACE::Bool32 sciSyncExport_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
sciSyncExport = sciSyncExport_;
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkPhysicalDeviceExternalSciSync2FeaturesNV const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceExternalSciSync2FeaturesNV *>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceExternalSciSync2FeaturesNV &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceExternalSciSync2FeaturesNV *>( this );
|
|
}
|
|
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
void * const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, sciSyncFence, sciSyncSemaphore2, sciSyncImport, sciSyncExport );
|
|
}
|
|
# endif
|
|
|
|
# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( PhysicalDeviceExternalSciSync2FeaturesNV const & ) const = default;
|
|
# else
|
|
bool operator==( PhysicalDeviceExternalSciSync2FeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( sciSyncFence == rhs.sciSyncFence ) && ( sciSyncSemaphore2 == rhs.sciSyncSemaphore2 ) &&
|
|
( sciSyncImport == rhs.sciSyncImport ) && ( sciSyncExport == rhs.sciSyncExport );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceExternalSciSync2FeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
# endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalSciSync2FeaturesNV;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 sciSyncFence = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 sciSyncSemaphore2 = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 sciSyncImport = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 sciSyncExport = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceExternalSciSync2FeaturesNV>
|
|
{
|
|
using Type = PhysicalDeviceExternalSciSync2FeaturesNV;
|
|
};
|
|
#endif /*VK_USE_PLATFORM_SCI*/
#if defined( VK_USE_PLATFORM_SCI )
|
|
struct PhysicalDeviceExternalSciSyncFeaturesNV
|
|
{
|
|
using NativeType = VkPhysicalDeviceExternalSciSyncFeaturesNV;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalSciSyncFeaturesNV;
|
|
|
|
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalSciSyncFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 sciSyncFence_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 sciSyncSemaphore_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 sciSyncImport_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 sciSyncExport_ = {},
|
|
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, sciSyncFence( sciSyncFence_ )
|
|
, sciSyncSemaphore( sciSyncSemaphore_ )
|
|
, sciSyncImport( sciSyncImport_ )
|
|
, sciSyncExport( sciSyncExport_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalSciSyncFeaturesNV( PhysicalDeviceExternalSciSyncFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceExternalSciSyncFeaturesNV( VkPhysicalDeviceExternalSciSyncFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceExternalSciSyncFeaturesNV( *reinterpret_cast<PhysicalDeviceExternalSciSyncFeaturesNV const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
PhysicalDeviceExternalSciSyncFeaturesNV & operator=( PhysicalDeviceExternalSciSyncFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceExternalSciSyncFeaturesNV & operator=( VkPhysicalDeviceExternalSciSyncFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSciSyncFeaturesNV const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalSciSyncFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalSciSyncFeaturesNV & setSciSyncFence( VULKAN_HPP_NAMESPACE::Bool32 sciSyncFence_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
sciSyncFence = sciSyncFence_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalSciSyncFeaturesNV & setSciSyncSemaphore( VULKAN_HPP_NAMESPACE::Bool32 sciSyncSemaphore_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
sciSyncSemaphore = sciSyncSemaphore_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalSciSyncFeaturesNV & setSciSyncImport( VULKAN_HPP_NAMESPACE::Bool32 sciSyncImport_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
sciSyncImport = sciSyncImport_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalSciSyncFeaturesNV & setSciSyncExport( VULKAN_HPP_NAMESPACE::Bool32 sciSyncExport_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
sciSyncExport = sciSyncExport_;
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkPhysicalDeviceExternalSciSyncFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceExternalSciSyncFeaturesNV *>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceExternalSciSyncFeaturesNV &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceExternalSciSyncFeaturesNV *>( this );
|
|
}
|
|
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
void * const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, sciSyncFence, sciSyncSemaphore, sciSyncImport, sciSyncExport );
|
|
}
|
|
# endif
|
|
|
|
# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( PhysicalDeviceExternalSciSyncFeaturesNV const & ) const = default;
|
|
# else
|
|
bool operator==( PhysicalDeviceExternalSciSyncFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( sciSyncFence == rhs.sciSyncFence ) && ( sciSyncSemaphore == rhs.sciSyncSemaphore ) &&
|
|
( sciSyncImport == rhs.sciSyncImport ) && ( sciSyncExport == rhs.sciSyncExport );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceExternalSciSyncFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
# endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalSciSyncFeaturesNV;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 sciSyncFence = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 sciSyncSemaphore = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 sciSyncImport = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 sciSyncExport = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceExternalSciSyncFeaturesNV>
|
|
{
|
|
using Type = PhysicalDeviceExternalSciSyncFeaturesNV;
|
|
};
|
|
#endif /*VK_USE_PLATFORM_SCI*/
struct PhysicalDeviceExternalSemaphoreInfo
|
|
{
|
|
using NativeType = VkPhysicalDeviceExternalSemaphoreInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalSemaphoreInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalSemaphoreInfo(
|
|
VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd,
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, handleType( handleType_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalSemaphoreInfo( PhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceExternalSemaphoreInfo( VkPhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceExternalSemaphoreInfo( *reinterpret_cast<PhysicalDeviceExternalSemaphoreInfo const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
PhysicalDeviceExternalSemaphoreInfo & operator=( PhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceExternalSemaphoreInfo & operator=( VkPhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalSemaphoreInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalSemaphoreInfo &
|
|
setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
handleType = handleType_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkPhysicalDeviceExternalSemaphoreInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceExternalSemaphoreInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceExternalSemaphoreInfo *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, handleType );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( PhysicalDeviceExternalSemaphoreInfo const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceExternalSemaphoreInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceExternalSemaphoreInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalSemaphoreInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd;
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceExternalSemaphoreInfo>
|
|
{
|
|
using Type = PhysicalDeviceExternalSemaphoreInfo;
|
|
};
|
|
|
|
using PhysicalDeviceExternalSemaphoreInfoKHR = PhysicalDeviceExternalSemaphoreInfo;
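// Illustrative usage sketch (comment only, not part of the generated header): this struct is the input
// to vkGetPhysicalDeviceExternalSemaphoreProperties. Assumes a valid vk::PhysicalDevice named
// "physicalDevice" and the default vk namespace.
//
//   vk::PhysicalDeviceExternalSemaphoreInfo semaphoreInfo( vk::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd );
//   vk::ExternalSemaphoreProperties semaphoreProperties = physicalDevice.getExternalSemaphoreProperties( semaphoreInfo );
//   bool importable = static_cast<bool>( semaphoreProperties.externalSemaphoreFeatures &
//                                        vk::ExternalSemaphoreFeatureFlagBits::eImportable );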
struct PhysicalDeviceFeatures2
|
|
{
|
|
using NativeType = VkPhysicalDeviceFeatures2;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFeatures2;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures2( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, features( features_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures2( PhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceFeatures2( VkPhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceFeatures2( *reinterpret_cast<PhysicalDeviceFeatures2 const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
PhysicalDeviceFeatures2 & operator=( PhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceFeatures2 & operator=( VkPhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures2 & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures2 & setFeatures( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures const & features_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
features = features_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkPhysicalDeviceFeatures2 const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceFeatures2 *>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceFeatures2 &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceFeatures2 *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, features );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( PhysicalDeviceFeatures2 const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceFeatures2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( features == rhs.features );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceFeatures2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFeatures2;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceFeatures2>
|
|
{
|
|
using Type = PhysicalDeviceFeatures2;
|
|
};
|
|
|
|
using PhysicalDeviceFeatures2KHR = PhysicalDeviceFeatures2;
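// Illustrative usage sketch (comment only, not part of the generated header): PhysicalDeviceFeatures2
// is the root of a pNext feature chain. It can be filled via vk::PhysicalDevice::getFeatures2 and then
// chained into DeviceCreateInfo::pNext (leaving pEnabledFeatures null). Assumes a valid
// vk::PhysicalDevice named "physicalDevice".
//
//   vk::PhysicalDeviceFeatures2 features2;
//   physicalDevice.getFeatures2( &features2 );   // query the core feature set
//
//   float queuePriority = 1.0f;
//   vk::DeviceQueueCreateInfo queueInfo( {}, 0, 1, &queuePriority );
//   vk::DeviceCreateInfo      deviceInfo( {}, queueInfo );
//   deviceInfo.pNext  = &features2;              // enable the queried features at device creation
//   vk::Device device = physicalDevice.createDevice( deviceInfo );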
struct PhysicalDeviceFloatControlsProperties
|
|
{
|
|
using NativeType = VkPhysicalDeviceFloatControlsProperties;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFloatControlsProperties;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceFloatControlsProperties(
|
|
VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence_ = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly,
|
|
VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence_ = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly,
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64_ = {},
|
|
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, denormBehaviorIndependence( denormBehaviorIndependence_ )
|
|
, roundingModeIndependence( roundingModeIndependence_ )
|
|
, shaderSignedZeroInfNanPreserveFloat16( shaderSignedZeroInfNanPreserveFloat16_ )
|
|
, shaderSignedZeroInfNanPreserveFloat32( shaderSignedZeroInfNanPreserveFloat32_ )
|
|
, shaderSignedZeroInfNanPreserveFloat64( shaderSignedZeroInfNanPreserveFloat64_ )
|
|
, shaderDenormPreserveFloat16( shaderDenormPreserveFloat16_ )
|
|
, shaderDenormPreserveFloat32( shaderDenormPreserveFloat32_ )
|
|
, shaderDenormPreserveFloat64( shaderDenormPreserveFloat64_ )
|
|
, shaderDenormFlushToZeroFloat16( shaderDenormFlushToZeroFloat16_ )
|
|
, shaderDenormFlushToZeroFloat32( shaderDenormFlushToZeroFloat32_ )
|
|
, shaderDenormFlushToZeroFloat64( shaderDenormFlushToZeroFloat64_ )
|
|
, shaderRoundingModeRTEFloat16( shaderRoundingModeRTEFloat16_ )
|
|
, shaderRoundingModeRTEFloat32( shaderRoundingModeRTEFloat32_ )
|
|
, shaderRoundingModeRTEFloat64( shaderRoundingModeRTEFloat64_ )
|
|
, shaderRoundingModeRTZFloat16( shaderRoundingModeRTZFloat16_ )
|
|
, shaderRoundingModeRTZFloat32( shaderRoundingModeRTZFloat32_ )
|
|
, shaderRoundingModeRTZFloat64( shaderRoundingModeRTZFloat64_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceFloatControlsProperties( PhysicalDeviceFloatControlsProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceFloatControlsProperties( VkPhysicalDeviceFloatControlsProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceFloatControlsProperties( *reinterpret_cast<PhysicalDeviceFloatControlsProperties const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
PhysicalDeviceFloatControlsProperties & operator=( PhysicalDeviceFloatControlsProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceFloatControlsProperties & operator=( VkPhysicalDeviceFloatControlsProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFloatControlsProperties const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
operator VkPhysicalDeviceFloatControlsProperties const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceFloatControlsProperties *>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceFloatControlsProperties &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceFloatControlsProperties *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
void * const &,
|
|
VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence const &,
|
|
VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType,
|
|
pNext,
|
|
denormBehaviorIndependence,
|
|
roundingModeIndependence,
|
|
shaderSignedZeroInfNanPreserveFloat16,
|
|
shaderSignedZeroInfNanPreserveFloat32,
|
|
shaderSignedZeroInfNanPreserveFloat64,
|
|
shaderDenormPreserveFloat16,
|
|
shaderDenormPreserveFloat32,
|
|
shaderDenormPreserveFloat64,
|
|
shaderDenormFlushToZeroFloat16,
|
|
shaderDenormFlushToZeroFloat32,
|
|
shaderDenormFlushToZeroFloat64,
|
|
shaderRoundingModeRTEFloat16,
|
|
shaderRoundingModeRTEFloat32,
|
|
shaderRoundingModeRTEFloat64,
|
|
shaderRoundingModeRTZFloat16,
|
|
shaderRoundingModeRTZFloat32,
|
|
shaderRoundingModeRTZFloat64 );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( PhysicalDeviceFloatControlsProperties const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceFloatControlsProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( denormBehaviorIndependence == rhs.denormBehaviorIndependence ) &&
|
|
( roundingModeIndependence == rhs.roundingModeIndependence ) &&
|
|
( shaderSignedZeroInfNanPreserveFloat16 == rhs.shaderSignedZeroInfNanPreserveFloat16 ) &&
|
|
( shaderSignedZeroInfNanPreserveFloat32 == rhs.shaderSignedZeroInfNanPreserveFloat32 ) &&
|
|
( shaderSignedZeroInfNanPreserveFloat64 == rhs.shaderSignedZeroInfNanPreserveFloat64 ) &&
|
|
( shaderDenormPreserveFloat16 == rhs.shaderDenormPreserveFloat16 ) && ( shaderDenormPreserveFloat32 == rhs.shaderDenormPreserveFloat32 ) &&
|
|
( shaderDenormPreserveFloat64 == rhs.shaderDenormPreserveFloat64 ) && ( shaderDenormFlushToZeroFloat16 == rhs.shaderDenormFlushToZeroFloat16 ) &&
|
|
( shaderDenormFlushToZeroFloat32 == rhs.shaderDenormFlushToZeroFloat32 ) &&
|
|
( shaderDenormFlushToZeroFloat64 == rhs.shaderDenormFlushToZeroFloat64 ) && ( shaderRoundingModeRTEFloat16 == rhs.shaderRoundingModeRTEFloat16 ) &&
|
|
( shaderRoundingModeRTEFloat32 == rhs.shaderRoundingModeRTEFloat32 ) && ( shaderRoundingModeRTEFloat64 == rhs.shaderRoundingModeRTEFloat64 ) &&
|
|
( shaderRoundingModeRTZFloat16 == rhs.shaderRoundingModeRTZFloat16 ) && ( shaderRoundingModeRTZFloat32 == rhs.shaderRoundingModeRTZFloat32 ) &&
|
|
( shaderRoundingModeRTZFloat64 == rhs.shaderRoundingModeRTZFloat64 );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceFloatControlsProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFloatControlsProperties;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly;
|
|
VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly;
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16 = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32 = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64 = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16 = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32 = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64 = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16 = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32 = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64 = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16 = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32 = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64 = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16 = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32 = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64 = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceFloatControlsProperties>
|
|
{
|
|
using Type = PhysicalDeviceFloatControlsProperties;
|
|
};
|
|
|
|
using PhysicalDeviceFloatControlsPropertiesKHR = PhysicalDeviceFloatControlsProperties;
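// Illustrative usage sketch (comment only, not part of the generated header): float-control limits are
// read-only properties, queried by chaining this struct into PhysicalDeviceProperties2 via pNext.
// Assumes a valid vk::PhysicalDevice named "physicalDevice".
//
//   vk::PhysicalDeviceFloatControlsProperties floatControls;
//   vk::PhysicalDeviceProperties2             properties2;
//   properties2.pNext = &floatControls;
//   physicalDevice.getProperties2( &properties2 );
//   // e.g. floatControls.shaderDenormFlushToZeroFloat32 now reports device support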
struct PhysicalDeviceFragmentShaderInterlockFeaturesEXT
|
|
{
|
|
using NativeType = VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShaderInterlockFeaturesEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderInterlockFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderSampleInterlock_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderPixelInterlock_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderShadingRateInterlock_ = {},
|
|
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, fragmentShaderSampleInterlock( fragmentShaderSampleInterlock_ )
|
|
, fragmentShaderPixelInterlock( fragmentShaderPixelInterlock_ )
|
|
, fragmentShaderShadingRateInterlock( fragmentShaderShadingRateInterlock_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR
|
|
PhysicalDeviceFragmentShaderInterlockFeaturesEXT( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceFragmentShaderInterlockFeaturesEXT( VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceFragmentShaderInterlockFeaturesEXT( *reinterpret_cast<PhysicalDeviceFragmentShaderInterlockFeaturesEXT const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
PhysicalDeviceFragmentShaderInterlockFeaturesEXT & operator=( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceFragmentShaderInterlockFeaturesEXT & operator=( VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderInterlockFeaturesEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderInterlockFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderInterlockFeaturesEXT &
|
|
setFragmentShaderSampleInterlock( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderSampleInterlock_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
fragmentShaderSampleInterlock = fragmentShaderSampleInterlock_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderInterlockFeaturesEXT &
|
|
setFragmentShaderPixelInterlock( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderPixelInterlock_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
fragmentShaderPixelInterlock = fragmentShaderPixelInterlock_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderInterlockFeaturesEXT &
|
|
setFragmentShaderShadingRateInterlock( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderShadingRateInterlock_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
fragmentShaderShadingRateInterlock = fragmentShaderShadingRateInterlock_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT *>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
void * const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, fragmentShaderSampleInterlock, fragmentShaderPixelInterlock, fragmentShaderShadingRateInterlock );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fragmentShaderSampleInterlock == rhs.fragmentShaderSampleInterlock ) &&
|
|
( fragmentShaderPixelInterlock == rhs.fragmentShaderPixelInterlock ) &&
|
|
( fragmentShaderShadingRateInterlock == rhs.fragmentShaderShadingRateInterlock );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShaderInterlockFeaturesEXT;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderSampleInterlock = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderPixelInterlock = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderShadingRateInterlock = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentShaderInterlockFeaturesEXT>
|
|
{
|
|
using Type = PhysicalDeviceFragmentShaderInterlockFeaturesEXT;
|
|
};
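// Illustrative usage sketch (comment only, not part of the generated header): like other feature
// structs, this one is chained into PhysicalDeviceFeatures2 to query support, and into
// DeviceCreateInfo::pNext to enable the features. Assumes a valid vk::PhysicalDevice named
// "physicalDevice".
//
//   vk::PhysicalDeviceFragmentShaderInterlockFeaturesEXT interlockFeatures;
//   vk::PhysicalDeviceFeatures2 features2;
//   features2.pNext = &interlockFeatures;
//   physicalDevice.getFeatures2( &features2 );
//   if ( interlockFeatures.fragmentShaderPixelInterlock )
//   {
//     // pixel-interlock may be used in fragment shaders once the feature is enabled on the device
//   }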
struct PhysicalDeviceFragmentShadingRateFeaturesKHR
|
|
{
|
|
using NativeType = VkPhysicalDeviceFragmentShadingRateFeaturesKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShadingRateFeaturesKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 pipelineFragmentShadingRate_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRate_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 attachmentFragmentShadingRate_ = {},
|
|
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, pipelineFragmentShadingRate( pipelineFragmentShadingRate_ )
|
|
, primitiveFragmentShadingRate( primitiveFragmentShadingRate_ )
|
|
, attachmentFragmentShadingRate( attachmentFragmentShadingRate_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateFeaturesKHR( PhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceFragmentShadingRateFeaturesKHR( VkPhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceFragmentShadingRateFeaturesKHR( *reinterpret_cast<PhysicalDeviceFragmentShadingRateFeaturesKHR const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
PhysicalDeviceFragmentShadingRateFeaturesKHR & operator=( PhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceFragmentShadingRateFeaturesKHR & operator=( VkPhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateFeaturesKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateFeaturesKHR &
|
|
setPipelineFragmentShadingRate( VULKAN_HPP_NAMESPACE::Bool32 pipelineFragmentShadingRate_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pipelineFragmentShadingRate = pipelineFragmentShadingRate_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateFeaturesKHR &
|
|
setPrimitiveFragmentShadingRate( VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRate_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
primitiveFragmentShadingRate = primitiveFragmentShadingRate_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateFeaturesKHR &
|
|
setAttachmentFragmentShadingRate( VULKAN_HPP_NAMESPACE::Bool32 attachmentFragmentShadingRate_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
attachmentFragmentShadingRate = attachmentFragmentShadingRate_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkPhysicalDeviceFragmentShadingRateFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceFragmentShadingRateFeaturesKHR *>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceFragmentShadingRateFeaturesKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceFragmentShadingRateFeaturesKHR *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
void * const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, pipelineFragmentShadingRate, primitiveFragmentShadingRate, attachmentFragmentShadingRate );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( PhysicalDeviceFragmentShadingRateFeaturesKHR const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineFragmentShadingRate == rhs.pipelineFragmentShadingRate ) &&
|
|
( primitiveFragmentShadingRate == rhs.primitiveFragmentShadingRate ) && ( attachmentFragmentShadingRate == rhs.attachmentFragmentShadingRate );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShadingRateFeaturesKHR;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 pipelineFragmentShadingRate = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRate = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 attachmentFragmentShadingRate = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentShadingRateFeaturesKHR>
|
|
{
|
|
using Type = PhysicalDeviceFragmentShadingRateFeaturesKHR;
|
|
};
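// Illustrative usage sketch (comment only, not part of the generated header): querying and enabling
// VK_KHR_fragment_shading_rate. Assumes a valid vk::PhysicalDevice named "physicalDevice" and a
// vk::DeviceCreateInfo named "deviceInfo" set up elsewhere.
//
//   vk::PhysicalDeviceFragmentShadingRateFeaturesKHR shadingRateFeatures;
//   vk::PhysicalDeviceFeatures2 features2;
//   features2.pNext = &shadingRateFeatures;
//   physicalDevice.getFeatures2( &features2 );
//
//   shadingRateFeatures.primitiveFragmentShadingRate = VK_FALSE;  // keep only what the application needs
//   deviceInfo.pNext = &features2;                                // enable via the device-creation pNext chain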
struct PhysicalDeviceFragmentShadingRateKHR
|
|
{
|
|
using NativeType = VkPhysicalDeviceFragmentShadingRateKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShadingRateKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateKHR( VULKAN_HPP_NAMESPACE::SampleCountFlags sampleCounts_ = {},
|
|
VULKAN_HPP_NAMESPACE::Extent2D fragmentSize_ = {},
|
|
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, sampleCounts( sampleCounts_ )
|
|
, fragmentSize( fragmentSize_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateKHR( PhysicalDeviceFragmentShadingRateKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceFragmentShadingRateKHR( VkPhysicalDeviceFragmentShadingRateKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceFragmentShadingRateKHR( *reinterpret_cast<PhysicalDeviceFragmentShadingRateKHR const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
PhysicalDeviceFragmentShadingRateKHR & operator=( PhysicalDeviceFragmentShadingRateKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceFragmentShadingRateKHR & operator=( VkPhysicalDeviceFragmentShadingRateKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
operator VkPhysicalDeviceFragmentShadingRateKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceFragmentShadingRateKHR *>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceFragmentShadingRateKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceFragmentShadingRateKHR *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::
|
|
tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::SampleCountFlags const &, VULKAN_HPP_NAMESPACE::Extent2D const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, sampleCounts, fragmentSize );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( PhysicalDeviceFragmentShadingRateKHR const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceFragmentShadingRateKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( sampleCounts == rhs.sampleCounts ) && ( fragmentSize == rhs.fragmentSize );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceFragmentShadingRateKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShadingRateKHR;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::SampleCountFlags sampleCounts = {};
|
|
VULKAN_HPP_NAMESPACE::Extent2D fragmentSize = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentShadingRateKHR>
|
|
{
|
|
using Type = PhysicalDeviceFragmentShadingRateKHR;
|
|
};
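// Illustrative usage sketch (comment only, not part of the generated header): one of these structs is
// returned per supported rate by vkGetPhysicalDeviceFragmentShadingRatesKHR. Assumes a valid
// vk::PhysicalDevice named "physicalDevice" and that VK_KHR_fragment_shading_rate is available.
//
//   std::vector<vk::PhysicalDeviceFragmentShadingRateKHR> rates = physicalDevice.getFragmentShadingRatesKHR();
//   for ( auto const & rate : rates )
//   {
//     // rate.fragmentSize is e.g. {2,2}; rate.sampleCounts lists the sample counts supported at that size
//   }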
struct PhysicalDeviceFragmentShadingRatePropertiesKHR
|
|
{
|
|
using NativeType = VkPhysicalDeviceFragmentShadingRatePropertiesKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShadingRatePropertiesKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRatePropertiesKHR(
|
|
VULKAN_HPP_NAMESPACE::Extent2D minFragmentShadingRateAttachmentTexelSize_ = {},
|
|
VULKAN_HPP_NAMESPACE::Extent2D maxFragmentShadingRateAttachmentTexelSize_ = {},
|
|
uint32_t maxFragmentShadingRateAttachmentTexelSizeAspectRatio_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRateWithMultipleViewports_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 layeredShadingRateAttachments_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateNonTrivialCombinerOps_ = {},
|
|
VULKAN_HPP_NAMESPACE::Extent2D maxFragmentSize_ = {},
|
|
uint32_t maxFragmentSizeAspectRatio_ = {},
|
|
uint32_t maxFragmentShadingRateCoverageSamples_ = {},
|
|
VULKAN_HPP_NAMESPACE::SampleCountFlagBits maxFragmentShadingRateRasterizationSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1,
|
|
VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithShaderDepthStencilWrites_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithSampleMask_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithShaderSampleMask_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithConservativeRasterization_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithFragmentShaderInterlock_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithCustomSampleLocations_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateStrictMultiplyCombiner_ = {},
|
|
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, minFragmentShadingRateAttachmentTexelSize( minFragmentShadingRateAttachmentTexelSize_ )
|
|
, maxFragmentShadingRateAttachmentTexelSize( maxFragmentShadingRateAttachmentTexelSize_ )
|
|
, maxFragmentShadingRateAttachmentTexelSizeAspectRatio( maxFragmentShadingRateAttachmentTexelSizeAspectRatio_ )
|
|
, primitiveFragmentShadingRateWithMultipleViewports( primitiveFragmentShadingRateWithMultipleViewports_ )
|
|
, layeredShadingRateAttachments( layeredShadingRateAttachments_ )
|
|
, fragmentShadingRateNonTrivialCombinerOps( fragmentShadingRateNonTrivialCombinerOps_ )
|
|
, maxFragmentSize( maxFragmentSize_ )
|
|
, maxFragmentSizeAspectRatio( maxFragmentSizeAspectRatio_ )
|
|
, maxFragmentShadingRateCoverageSamples( maxFragmentShadingRateCoverageSamples_ )
|
|
, maxFragmentShadingRateRasterizationSamples( maxFragmentShadingRateRasterizationSamples_ )
|
|
, fragmentShadingRateWithShaderDepthStencilWrites( fragmentShadingRateWithShaderDepthStencilWrites_ )
|
|
, fragmentShadingRateWithSampleMask( fragmentShadingRateWithSampleMask_ )
|
|
, fragmentShadingRateWithShaderSampleMask( fragmentShadingRateWithShaderSampleMask_ )
|
|
, fragmentShadingRateWithConservativeRasterization( fragmentShadingRateWithConservativeRasterization_ )
|
|
, fragmentShadingRateWithFragmentShaderInterlock( fragmentShadingRateWithFragmentShaderInterlock_ )
|
|
, fragmentShadingRateWithCustomSampleLocations( fragmentShadingRateWithCustomSampleLocations_ )
|
|
, fragmentShadingRateStrictMultiplyCombiner( fragmentShadingRateStrictMultiplyCombiner_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR
|
|
PhysicalDeviceFragmentShadingRatePropertiesKHR( PhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceFragmentShadingRatePropertiesKHR( VkPhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceFragmentShadingRatePropertiesKHR( *reinterpret_cast<PhysicalDeviceFragmentShadingRatePropertiesKHR const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
PhysicalDeviceFragmentShadingRatePropertiesKHR & operator=( PhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceFragmentShadingRatePropertiesKHR & operator=( VkPhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRatePropertiesKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
operator VkPhysicalDeviceFragmentShadingRatePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceFragmentShadingRatePropertiesKHR *>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceFragmentShadingRatePropertiesKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceFragmentShadingRatePropertiesKHR *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
void * const &,
|
|
VULKAN_HPP_NAMESPACE::Extent2D const &,
|
|
VULKAN_HPP_NAMESPACE::Extent2D const &,
|
|
uint32_t const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Extent2D const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
VULKAN_HPP_NAMESPACE::SampleCountFlagBits const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType,
|
|
pNext,
|
|
minFragmentShadingRateAttachmentTexelSize,
|
|
maxFragmentShadingRateAttachmentTexelSize,
|
|
maxFragmentShadingRateAttachmentTexelSizeAspectRatio,
|
|
primitiveFragmentShadingRateWithMultipleViewports,
|
|
layeredShadingRateAttachments,
|
|
fragmentShadingRateNonTrivialCombinerOps,
|
|
maxFragmentSize,
|
|
maxFragmentSizeAspectRatio,
|
|
maxFragmentShadingRateCoverageSamples,
|
|
maxFragmentShadingRateRasterizationSamples,
|
|
fragmentShadingRateWithShaderDepthStencilWrites,
|
|
fragmentShadingRateWithSampleMask,
|
|
fragmentShadingRateWithShaderSampleMask,
|
|
fragmentShadingRateWithConservativeRasterization,
|
|
fragmentShadingRateWithFragmentShaderInterlock,
|
|
fragmentShadingRateWithCustomSampleLocations,
|
|
fragmentShadingRateStrictMultiplyCombiner );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( PhysicalDeviceFragmentShadingRatePropertiesKHR const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
|
|
( minFragmentShadingRateAttachmentTexelSize == rhs.minFragmentShadingRateAttachmentTexelSize ) &&
|
|
( maxFragmentShadingRateAttachmentTexelSize == rhs.maxFragmentShadingRateAttachmentTexelSize ) &&
|
|
( maxFragmentShadingRateAttachmentTexelSizeAspectRatio == rhs.maxFragmentShadingRateAttachmentTexelSizeAspectRatio ) &&
|
|
( primitiveFragmentShadingRateWithMultipleViewports == rhs.primitiveFragmentShadingRateWithMultipleViewports ) &&
|
|
( layeredShadingRateAttachments == rhs.layeredShadingRateAttachments ) &&
|
|
( fragmentShadingRateNonTrivialCombinerOps == rhs.fragmentShadingRateNonTrivialCombinerOps ) && ( maxFragmentSize == rhs.maxFragmentSize ) &&
|
|
( maxFragmentSizeAspectRatio == rhs.maxFragmentSizeAspectRatio ) &&
|
|
( maxFragmentShadingRateCoverageSamples == rhs.maxFragmentShadingRateCoverageSamples ) &&
|
|
( maxFragmentShadingRateRasterizationSamples == rhs.maxFragmentShadingRateRasterizationSamples ) &&
|
|
( fragmentShadingRateWithShaderDepthStencilWrites == rhs.fragmentShadingRateWithShaderDepthStencilWrites ) &&
|
|
( fragmentShadingRateWithSampleMask == rhs.fragmentShadingRateWithSampleMask ) &&
|
|
( fragmentShadingRateWithShaderSampleMask == rhs.fragmentShadingRateWithShaderSampleMask ) &&
|
|
( fragmentShadingRateWithConservativeRasterization == rhs.fragmentShadingRateWithConservativeRasterization ) &&
|
|
( fragmentShadingRateWithFragmentShaderInterlock == rhs.fragmentShadingRateWithFragmentShaderInterlock ) &&
|
|
( fragmentShadingRateWithCustomSampleLocations == rhs.fragmentShadingRateWithCustomSampleLocations ) &&
|
|
( fragmentShadingRateStrictMultiplyCombiner == rhs.fragmentShadingRateStrictMultiplyCombiner );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShadingRatePropertiesKHR;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Extent2D minFragmentShadingRateAttachmentTexelSize = {};
|
|
VULKAN_HPP_NAMESPACE::Extent2D maxFragmentShadingRateAttachmentTexelSize = {};
|
|
uint32_t maxFragmentShadingRateAttachmentTexelSizeAspectRatio = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRateWithMultipleViewports = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 layeredShadingRateAttachments = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateNonTrivialCombinerOps = {};
|
|
VULKAN_HPP_NAMESPACE::Extent2D maxFragmentSize = {};
|
|
uint32_t maxFragmentSizeAspectRatio = {};
|
|
uint32_t maxFragmentShadingRateCoverageSamples = {};
|
|
VULKAN_HPP_NAMESPACE::SampleCountFlagBits maxFragmentShadingRateRasterizationSamples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
|
|
VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithShaderDepthStencilWrites = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithSampleMask = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithShaderSampleMask = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithConservativeRasterization = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithFragmentShaderInterlock = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithCustomSampleLocations = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateStrictMultiplyCombiner = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentShadingRatePropertiesKHR>
|
|
{
|
|
using Type = PhysicalDeviceFragmentShadingRatePropertiesKHR;
|
|
};
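// Illustrative usage sketch (comment only, not part of the generated header): these limits are queried
// through the PhysicalDeviceProperties2 pNext chain. Assumes a valid vk::PhysicalDevice named
// "physicalDevice".
//
//   vk::PhysicalDeviceFragmentShadingRatePropertiesKHR shadingRateProperties;
//   vk::PhysicalDeviceProperties2 properties2;
//   properties2.pNext = &shadingRateProperties;
//   physicalDevice.getProperties2( &properties2 );
//   // e.g. shadingRateProperties.maxFragmentSize gives the largest supported fragment size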
struct PhysicalDeviceGroupProperties
|
|
{
|
|
using NativeType = VkPhysicalDeviceGroupProperties;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceGroupProperties;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR_14
|
|
PhysicalDeviceGroupProperties( uint32_t physicalDeviceCount_ = {},
|
|
std::array<VULKAN_HPP_NAMESPACE::PhysicalDevice, VK_MAX_DEVICE_GROUP_SIZE> const & physicalDevices_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 subsetAllocation_ = {},
|
|
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, physicalDeviceCount( physicalDeviceCount_ )
|
|
, physicalDevices( physicalDevices_ )
|
|
, subsetAllocation( subsetAllocation_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGroupProperties( PhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceGroupProperties( VkPhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceGroupProperties( *reinterpret_cast<PhysicalDeviceGroupProperties const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
PhysicalDeviceGroupProperties( VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::PhysicalDevice> const & physicalDevices_,
|
|
VULKAN_HPP_NAMESPACE::Bool32 subsetAllocation_ = {},
|
|
void * pNext_ = nullptr )
|
|
: pNext( pNext_ )
|
|
, physicalDeviceCount( std::min( static_cast<uint32_t>( physicalDevices_.size() ), VK_MAX_DEVICE_GROUP_SIZE ) )
|
|
, subsetAllocation( subsetAllocation_ )
|
|
{
|
|
VULKAN_HPP_ASSERT( physicalDevices_.size() <= VK_MAX_DEVICE_GROUP_SIZE );
|
|
memcpy( physicalDevices, physicalDevices_.data(), physicalDeviceCount * sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevice ) );
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
PhysicalDeviceGroupProperties & operator=( PhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceGroupProperties & operator=( VkPhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
operator VkPhysicalDeviceGroupProperties const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceGroupProperties *>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceGroupProperties &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceGroupProperties *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
void * const &,
|
|
uint32_t const &,
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::PhysicalDevice, VK_MAX_DEVICE_GROUP_SIZE> const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, physicalDeviceCount, physicalDevices, subsetAllocation );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
std::strong_ordering operator<=>( PhysicalDeviceGroupProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
|
|
return cmp;
|
|
if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
|
|
return cmp;
|
|
if ( auto cmp = physicalDeviceCount <=> rhs.physicalDeviceCount; cmp != 0 )
|
|
return cmp;
|
|
for ( size_t i = 0; i < physicalDeviceCount; ++i )
|
|
{
|
|
if ( auto cmp = physicalDevices[i] <=> rhs.physicalDevices[i]; cmp != 0 )
|
|
return cmp;
|
|
}
|
|
if ( auto cmp = subsetAllocation <=> rhs.subsetAllocation; cmp != 0 )
|
|
return cmp;
|
|
|
|
return std::strong_ordering::equivalent;
|
|
}
|
|
#endif
|
|
|
|
bool operator==( PhysicalDeviceGroupProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( physicalDeviceCount == rhs.physicalDeviceCount ) &&
|
|
( memcmp( physicalDevices, rhs.physicalDevices, physicalDeviceCount * sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevice ) ) == 0 ) &&
|
|
( subsetAllocation == rhs.subsetAllocation );
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceGroupProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceGroupProperties;
|
|
void * pNext = {};
|
|
uint32_t physicalDeviceCount = {};
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::PhysicalDevice, VK_MAX_DEVICE_GROUP_SIZE> physicalDevices = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 subsetAllocation = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceGroupProperties>
|
|
{
|
|
using Type = PhysicalDeviceGroupProperties;
|
|
};
|
|
|
|
using PhysicalDeviceGroupPropertiesKHR = PhysicalDeviceGroupProperties;
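
  // Illustrative usage sketch (not part of the generated header): with the enhanced-mode API, device
  // groups can be queried directly from an Instance; only the first physicalDeviceCount entries of
  // physicalDevices are meaningful. The variable names below (instance, groups) are hypothetical.
  //
  //   std::vector<vk::PhysicalDeviceGroupProperties> groups = instance.enumeratePhysicalDeviceGroups();
  //   for ( auto const & group : groups )
  //   {
  //     // group.physicalDevices[0] .. group.physicalDevices[group.physicalDeviceCount - 1] are valid
  //   }
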
  struct PhysicalDeviceHostQueryResetFeatures
  {
    using NativeType = VkPhysicalDeviceHostQueryResetFeatures;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceHostQueryResetFeatures;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceHostQueryResetFeatures( VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , hostQueryReset( hostQueryReset_ )
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceHostQueryResetFeatures( PhysicalDeviceHostQueryResetFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceHostQueryResetFeatures( VkPhysicalDeviceHostQueryResetFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceHostQueryResetFeatures( *reinterpret_cast<PhysicalDeviceHostQueryResetFeatures const *>( &rhs ) )
    {
    }

    PhysicalDeviceHostQueryResetFeatures & operator=( PhysicalDeviceHostQueryResetFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceHostQueryResetFeatures & operator=( VkPhysicalDeviceHostQueryResetFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceHostQueryResetFeatures const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostQueryResetFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostQueryResetFeatures & setHostQueryReset( VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_ ) VULKAN_HPP_NOEXCEPT
    {
      hostQueryReset = hostQueryReset_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkPhysicalDeviceHostQueryResetFeatures const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceHostQueryResetFeatures *>( this );
    }

    operator VkPhysicalDeviceHostQueryResetFeatures &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceHostQueryResetFeatures *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, hostQueryReset );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceHostQueryResetFeatures const & ) const = default;
#else
    bool operator==( PhysicalDeviceHostQueryResetFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
# if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
# else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( hostQueryReset == rhs.hostQueryReset );
# endif
    }

    bool operator!=( PhysicalDeviceHostQueryResetFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType          = StructureType::ePhysicalDeviceHostQueryResetFeatures;
    void *                              pNext          = {};
    VULKAN_HPP_NAMESPACE::Bool32        hostQueryReset = {};
  };

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceHostQueryResetFeatures>
  {
    using Type = PhysicalDeviceHostQueryResetFeatures;
  };

using PhysicalDeviceHostQueryResetFeaturesEXT = PhysicalDeviceHostQueryResetFeatures;
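
  // Illustrative usage sketch (not part of the generated header): feature structs like this one are
  // typically chained behind PhysicalDeviceFeatures2 to query support, and behind DeviceCreateInfo to
  // enable the feature. The names below (physicalDevice, chain, supported) are hypothetical.
  //
  //   auto chain     = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2, vk::PhysicalDeviceHostQueryResetFeatures>();
  //   bool supported = chain.get<vk::PhysicalDeviceHostQueryResetFeatures>().hostQueryReset;
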
  struct PhysicalDeviceIDProperties
  {
    using NativeType = VkPhysicalDeviceIDProperties;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceIdProperties;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceIDProperties( std::array<uint8_t, VK_UUID_SIZE> const & deviceUUID_      = {},
                                                        std::array<uint8_t, VK_UUID_SIZE> const & driverUUID_      = {},
                                                        std::array<uint8_t, VK_LUID_SIZE> const & deviceLUID_      = {},
                                                        uint32_t                                  deviceNodeMask_  = {},
                                                        VULKAN_HPP_NAMESPACE::Bool32              deviceLUIDValid_ = {},
                                                        void *                                    pNext_           = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , deviceUUID( deviceUUID_ )
      , driverUUID( driverUUID_ )
      , deviceLUID( deviceLUID_ )
      , deviceNodeMask( deviceNodeMask_ )
      , deviceLUIDValid( deviceLUIDValid_ )
    {
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceIDProperties( PhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceIDProperties( VkPhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceIDProperties( *reinterpret_cast<PhysicalDeviceIDProperties const *>( &rhs ) )
    {
    }

    PhysicalDeviceIDProperties & operator=( PhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceIDProperties & operator=( VkPhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceIDProperties const *>( &rhs );
      return *this;
    }

    operator VkPhysicalDeviceIDProperties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceIDProperties *>( this );
    }

    operator VkPhysicalDeviceIDProperties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceIDProperties *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
               void * const &,
               VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> const &,
               VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> const &,
               VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_LUID_SIZE> const &,
               uint32_t const &,
               VULKAN_HPP_NAMESPACE::Bool32 const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, deviceUUID, driverUUID, deviceLUID, deviceNodeMask, deviceLUIDValid );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceIDProperties const & ) const = default;
#else
    bool operator==( PhysicalDeviceIDProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
# if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
# else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceUUID == rhs.deviceUUID ) && ( driverUUID == rhs.driverUUID ) &&
             ( deviceLUID == rhs.deviceLUID ) && ( deviceNodeMask == rhs.deviceNodeMask ) && ( deviceLUIDValid == rhs.deviceLUIDValid );
# endif
    }

    bool operator!=( PhysicalDeviceIDProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                         sType           = StructureType::ePhysicalDeviceIdProperties;
    void *                                                      pNext           = {};
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> deviceUUID      = {};
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> driverUUID      = {};
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_LUID_SIZE> deviceLUID      = {};
    uint32_t                                                    deviceNodeMask  = {};
    VULKAN_HPP_NAMESPACE::Bool32                                deviceLUIDValid = {};
  };

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceIdProperties>
  {
    using Type = PhysicalDeviceIDProperties;
  };

using PhysicalDeviceIDPropertiesKHR = PhysicalDeviceIDProperties;
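
  // Illustrative usage sketch (not part of the generated header): this properties struct is filled by the
  // implementation when chained behind PhysicalDeviceProperties2; deviceUUID, driverUUID and deviceLUID
  // identify the device across APIs and processes. The name physicalDevice below is hypothetical.
  //
  //   auto         chain   = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2, vk::PhysicalDeviceIDProperties>();
  //   auto const & idProps = chain.get<vk::PhysicalDeviceIDProperties>();
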
struct PhysicalDeviceImageDrmFormatModifierInfoEXT
|
|
{
|
|
using NativeType = VkPhysicalDeviceImageDrmFormatModifierInfoEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageDrmFormatModifierInfoEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR
|
|
PhysicalDeviceImageDrmFormatModifierInfoEXT( uint64_t drmFormatModifier_ = {},
|
|
VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive,
|
|
uint32_t queueFamilyIndexCount_ = {},
|
|
const uint32_t * pQueueFamilyIndices_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, drmFormatModifier( drmFormatModifier_ )
|
|
, sharingMode( sharingMode_ )
|
|
, queueFamilyIndexCount( queueFamilyIndexCount_ )
|
|
, pQueueFamilyIndices( pQueueFamilyIndices_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceImageDrmFormatModifierInfoEXT( PhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceImageDrmFormatModifierInfoEXT( VkPhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceImageDrmFormatModifierInfoEXT( *reinterpret_cast<PhysicalDeviceImageDrmFormatModifierInfoEXT const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
PhysicalDeviceImageDrmFormatModifierInfoEXT( uint64_t drmFormatModifier_,
|
|
VULKAN_HPP_NAMESPACE::SharingMode sharingMode_,
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & queueFamilyIndices_,
|
|
const void * pNext_ = nullptr )
|
|
: pNext( pNext_ )
|
|
, drmFormatModifier( drmFormatModifier_ )
|
|
, sharingMode( sharingMode_ )
|
|
, queueFamilyIndexCount( static_cast<uint32_t>( queueFamilyIndices_.size() ) )
|
|
, pQueueFamilyIndices( queueFamilyIndices_.data() )
|
|
{
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
PhysicalDeviceImageDrmFormatModifierInfoEXT & operator=( PhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceImageDrmFormatModifierInfoEXT & operator=( VkPhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageDrmFormatModifierInfoEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageDrmFormatModifierInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageDrmFormatModifierInfoEXT & setDrmFormatModifier( uint64_t drmFormatModifier_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
drmFormatModifier = drmFormatModifier_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageDrmFormatModifierInfoEXT & setSharingMode( VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
sharingMode = sharingMode_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageDrmFormatModifierInfoEXT & setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
queueFamilyIndexCount = queueFamilyIndexCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageDrmFormatModifierInfoEXT & setPQueueFamilyIndices( const uint32_t * pQueueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pQueueFamilyIndices = pQueueFamilyIndices_;
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
PhysicalDeviceImageDrmFormatModifierInfoEXT &
|
|
setQueueFamilyIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & queueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
queueFamilyIndexCount = static_cast<uint32_t>( queueFamilyIndices_.size() );
|
|
pQueueFamilyIndices = queueFamilyIndices_.data();
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkPhysicalDeviceImageDrmFormatModifierInfoEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceImageDrmFormatModifierInfoEXT *>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceImageDrmFormatModifierInfoEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceImageDrmFormatModifierInfoEXT *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
const void * const &,
|
|
uint64_t const &,
|
|
VULKAN_HPP_NAMESPACE::SharingMode const &,
|
|
uint32_t const &,
|
|
const uint32_t * const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, drmFormatModifier, sharingMode, queueFamilyIndexCount, pQueueFamilyIndices );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( PhysicalDeviceImageDrmFormatModifierInfoEXT const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( drmFormatModifier == rhs.drmFormatModifier ) && ( sharingMode == rhs.sharingMode ) &&
|
|
( queueFamilyIndexCount == rhs.queueFamilyIndexCount ) && ( pQueueFamilyIndices == rhs.pQueueFamilyIndices );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageDrmFormatModifierInfoEXT;
|
|
const void * pNext = {};
|
|
uint64_t drmFormatModifier = {};
|
|
VULKAN_HPP_NAMESPACE::SharingMode sharingMode = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive;
|
|
uint32_t queueFamilyIndexCount = {};
|
|
const uint32_t * pQueueFamilyIndices = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceImageDrmFormatModifierInfoEXT>
|
|
{
|
|
using Type = PhysicalDeviceImageDrmFormatModifierInfoEXT;
|
|
};
|
|
|
|
struct PhysicalDeviceImageFormatInfo2
|
|
{
|
|
using NativeType = VkPhysicalDeviceImageFormatInfo2;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageFormatInfo2;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceImageFormatInfo2( VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
|
|
VULKAN_HPP_NAMESPACE::ImageType type_ = VULKAN_HPP_NAMESPACE::ImageType::e1D,
|
|
VULKAN_HPP_NAMESPACE::ImageTiling tiling_ = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal,
|
|
VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {},
|
|
VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, format( format_ )
|
|
, type( type_ )
|
|
, tiling( tiling_ )
|
|
, usage( usage_ )
|
|
, flags( flags_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceImageFormatInfo2( PhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceImageFormatInfo2( VkPhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceImageFormatInfo2( *reinterpret_cast<PhysicalDeviceImageFormatInfo2 const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
PhysicalDeviceImageFormatInfo2 & operator=( PhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceImageFormatInfo2 & operator=( VkPhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageFormatInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageFormatInfo2 & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
format = format_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageFormatInfo2 & setType( VULKAN_HPP_NAMESPACE::ImageType type_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
type = type_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageFormatInfo2 & setTiling( VULKAN_HPP_NAMESPACE::ImageTiling tiling_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
tiling = tiling_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageFormatInfo2 & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
usage = usage_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageFormatInfo2 & setFlags( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkPhysicalDeviceImageFormatInfo2 const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceImageFormatInfo2 &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceImageFormatInfo2 *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
const void * const &,
|
|
VULKAN_HPP_NAMESPACE::Format const &,
|
|
VULKAN_HPP_NAMESPACE::ImageType const &,
|
|
VULKAN_HPP_NAMESPACE::ImageTiling const &,
|
|
VULKAN_HPP_NAMESPACE::ImageUsageFlags const &,
|
|
VULKAN_HPP_NAMESPACE::ImageCreateFlags const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, format, type, tiling, usage, flags );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( PhysicalDeviceImageFormatInfo2 const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceImageFormatInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( format == rhs.format ) && ( type == rhs.type ) && ( tiling == rhs.tiling ) &&
|
|
( usage == rhs.usage ) && ( flags == rhs.flags );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceImageFormatInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageFormatInfo2;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
|
|
VULKAN_HPP_NAMESPACE::ImageType type = VULKAN_HPP_NAMESPACE::ImageType::e1D;
|
|
VULKAN_HPP_NAMESPACE::ImageTiling tiling = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal;
|
|
VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {};
|
|
VULKAN_HPP_NAMESPACE::ImageCreateFlags flags = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceImageFormatInfo2>
|
|
{
|
|
using Type = PhysicalDeviceImageFormatInfo2;
|
|
};
using PhysicalDeviceImageFormatInfo2KHR = PhysicalDeviceImageFormatInfo2;
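
  // Illustrative usage sketch (not part of the generated header): this struct describes the image
  // parameters for a capability query; in enhanced mode the query can be issued directly from a
  // PhysicalDevice. The name physicalDevice below is hypothetical.
  //
  //   vk::PhysicalDeviceImageFormatInfo2 info( vk::Format::eR8G8B8A8Unorm,
  //                                            vk::ImageType::e2D,
  //                                            vk::ImageTiling::eOptimal,
  //                                            vk::ImageUsageFlagBits::eSampled );
  //   vk::ImageFormatProperties2 props = physicalDevice.getImageFormatProperties2( info );
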
struct PhysicalDeviceImageRobustnessFeatures
|
|
{
|
|
using NativeType = VkPhysicalDeviceImageRobustnessFeatures;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageRobustnessFeatures;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceImageRobustnessFeatures( VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess_ = {},
|
|
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, robustImageAccess( robustImageAccess_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceImageRobustnessFeatures( PhysicalDeviceImageRobustnessFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceImageRobustnessFeatures( VkPhysicalDeviceImageRobustnessFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceImageRobustnessFeatures( *reinterpret_cast<PhysicalDeviceImageRobustnessFeatures const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
PhysicalDeviceImageRobustnessFeatures & operator=( PhysicalDeviceImageRobustnessFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceImageRobustnessFeatures & operator=( VkPhysicalDeviceImageRobustnessFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageRobustnessFeatures const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageRobustnessFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageRobustnessFeatures & setRobustImageAccess( VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
robustImageAccess = robustImageAccess_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkPhysicalDeviceImageRobustnessFeatures const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceImageRobustnessFeatures *>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceImageRobustnessFeatures &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceImageRobustnessFeatures *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, robustImageAccess );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( PhysicalDeviceImageRobustnessFeatures const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceImageRobustnessFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( robustImageAccess == rhs.robustImageAccess );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceImageRobustnessFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageRobustnessFeatures;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceImageRobustnessFeatures>
|
|
{
|
|
using Type = PhysicalDeviceImageRobustnessFeatures;
|
|
};
|
|
|
|
using PhysicalDeviceImageRobustnessFeaturesEXT = PhysicalDeviceImageRobustnessFeatures;
|
|
|
|
struct PhysicalDeviceImageViewImageFormatInfoEXT
|
|
{
|
|
using NativeType = VkPhysicalDeviceImageViewImageFormatInfoEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageViewImageFormatInfoEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR
|
|
PhysicalDeviceImageViewImageFormatInfoEXT( VULKAN_HPP_NAMESPACE::ImageViewType imageViewType_ = VULKAN_HPP_NAMESPACE::ImageViewType::e1D,
|
|
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, imageViewType( imageViewType_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceImageViewImageFormatInfoEXT( PhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceImageViewImageFormatInfoEXT( VkPhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceImageViewImageFormatInfoEXT( *reinterpret_cast<PhysicalDeviceImageViewImageFormatInfoEXT const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
PhysicalDeviceImageViewImageFormatInfoEXT & operator=( PhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceImageViewImageFormatInfoEXT & operator=( VkPhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewImageFormatInfoEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageViewImageFormatInfoEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageViewImageFormatInfoEXT &
|
|
setImageViewType( VULKAN_HPP_NAMESPACE::ImageViewType imageViewType_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
imageViewType = imageViewType_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkPhysicalDeviceImageViewImageFormatInfoEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceImageViewImageFormatInfoEXT *>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceImageViewImageFormatInfoEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceImageViewImageFormatInfoEXT *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ImageViewType const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, imageViewType );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( PhysicalDeviceImageViewImageFormatInfoEXT const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageViewType == rhs.imageViewType );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageViewImageFormatInfoEXT;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::ImageViewType imageViewType = VULKAN_HPP_NAMESPACE::ImageViewType::e1D;
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceImageViewImageFormatInfoEXT>
|
|
{
|
|
using Type = PhysicalDeviceImageViewImageFormatInfoEXT;
|
|
};
|
|
|
|
struct PhysicalDeviceImagelessFramebufferFeatures
|
|
{
|
|
using NativeType = VkPhysicalDeviceImagelessFramebufferFeatures;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImagelessFramebufferFeatures;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceImagelessFramebufferFeatures( VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_ = {},
|
|
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, imagelessFramebuffer( imagelessFramebuffer_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceImagelessFramebufferFeatures( PhysicalDeviceImagelessFramebufferFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceImagelessFramebufferFeatures( VkPhysicalDeviceImagelessFramebufferFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceImagelessFramebufferFeatures( *reinterpret_cast<PhysicalDeviceImagelessFramebufferFeatures const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
PhysicalDeviceImagelessFramebufferFeatures & operator=( PhysicalDeviceImagelessFramebufferFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceImagelessFramebufferFeatures & operator=( VkPhysicalDeviceImagelessFramebufferFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceImagelessFramebufferFeatures const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImagelessFramebufferFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImagelessFramebufferFeatures &
|
|
setImagelessFramebuffer( VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
imagelessFramebuffer = imagelessFramebuffer_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkPhysicalDeviceImagelessFramebufferFeatures const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceImagelessFramebufferFeatures *>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceImagelessFramebufferFeatures &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceImagelessFramebufferFeatures *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, imagelessFramebuffer );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( PhysicalDeviceImagelessFramebufferFeatures const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceImagelessFramebufferFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imagelessFramebuffer == rhs.imagelessFramebuffer );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceImagelessFramebufferFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImagelessFramebufferFeatures;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceImagelessFramebufferFeatures>
|
|
{
|
|
using Type = PhysicalDeviceImagelessFramebufferFeatures;
|
|
};
|
|
|
|
using PhysicalDeviceImagelessFramebufferFeaturesKHR = PhysicalDeviceImagelessFramebufferFeatures;
|
|
|
|
struct PhysicalDeviceInlineUniformBlockFeatures
|
|
{
|
|
using NativeType = VkPhysicalDeviceInlineUniformBlockFeatures;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceInlineUniformBlockFeatures;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockFeatures( VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind_ = {},
|
|
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, inlineUniformBlock( inlineUniformBlock_ )
|
|
, descriptorBindingInlineUniformBlockUpdateAfterBind( descriptorBindingInlineUniformBlockUpdateAfterBind_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockFeatures( PhysicalDeviceInlineUniformBlockFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceInlineUniformBlockFeatures( VkPhysicalDeviceInlineUniformBlockFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceInlineUniformBlockFeatures( *reinterpret_cast<PhysicalDeviceInlineUniformBlockFeatures const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
PhysicalDeviceInlineUniformBlockFeatures & operator=( PhysicalDeviceInlineUniformBlockFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceInlineUniformBlockFeatures & operator=( VkPhysicalDeviceInlineUniformBlockFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockFeatures const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInlineUniformBlockFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInlineUniformBlockFeatures &
|
|
setInlineUniformBlock( VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
inlineUniformBlock = inlineUniformBlock_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInlineUniformBlockFeatures & setDescriptorBindingInlineUniformBlockUpdateAfterBind(
|
|
VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
descriptorBindingInlineUniformBlockUpdateAfterBind = descriptorBindingInlineUniformBlockUpdateAfterBind_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkPhysicalDeviceInlineUniformBlockFeatures const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceInlineUniformBlockFeatures *>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceInlineUniformBlockFeatures &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceInlineUniformBlockFeatures *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, inlineUniformBlock, descriptorBindingInlineUniformBlockUpdateAfterBind );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( PhysicalDeviceInlineUniformBlockFeatures const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceInlineUniformBlockFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( inlineUniformBlock == rhs.inlineUniformBlock ) &&
|
|
( descriptorBindingInlineUniformBlockUpdateAfterBind == rhs.descriptorBindingInlineUniformBlockUpdateAfterBind );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceInlineUniformBlockFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceInlineUniformBlockFeatures;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceInlineUniformBlockFeatures>
|
|
{
|
|
using Type = PhysicalDeviceInlineUniformBlockFeatures;
|
|
};
using PhysicalDeviceInlineUniformBlockFeaturesEXT = PhysicalDeviceInlineUniformBlockFeatures;
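
  // Illustrative usage sketch (not part of the generated header): to enable inline uniform blocks, this
  // feature struct is chained into DeviceCreateInfo::pNext at device creation. The variables below
  // (queueCreateInfos, deviceCreateInfo) are hypothetical.
  //
  //   vk::PhysicalDeviceInlineUniformBlockFeatures inlineUniformBlockFeatures( VK_TRUE );
  //   vk::DeviceCreateInfo                         deviceCreateInfo( {}, queueCreateInfos );
  //   deviceCreateInfo.pNext = &inlineUniformBlockFeatures;
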
struct PhysicalDeviceInlineUniformBlockProperties
|
|
{
|
|
using NativeType = VkPhysicalDeviceInlineUniformBlockProperties;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceInlineUniformBlockProperties;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockProperties( uint32_t maxInlineUniformBlockSize_ = {},
|
|
uint32_t maxPerStageDescriptorInlineUniformBlocks_ = {},
|
|
uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks_ = {},
|
|
uint32_t maxDescriptorSetInlineUniformBlocks_ = {},
|
|
uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks_ = {},
|
|
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, maxInlineUniformBlockSize( maxInlineUniformBlockSize_ )
|
|
, maxPerStageDescriptorInlineUniformBlocks( maxPerStageDescriptorInlineUniformBlocks_ )
|
|
, maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks( maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks_ )
|
|
, maxDescriptorSetInlineUniformBlocks( maxDescriptorSetInlineUniformBlocks_ )
|
|
, maxDescriptorSetUpdateAfterBindInlineUniformBlocks( maxDescriptorSetUpdateAfterBindInlineUniformBlocks_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockProperties( PhysicalDeviceInlineUniformBlockProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceInlineUniformBlockProperties( VkPhysicalDeviceInlineUniformBlockProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceInlineUniformBlockProperties( *reinterpret_cast<PhysicalDeviceInlineUniformBlockProperties const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
PhysicalDeviceInlineUniformBlockProperties & operator=( PhysicalDeviceInlineUniformBlockProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceInlineUniformBlockProperties & operator=( VkPhysicalDeviceInlineUniformBlockProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockProperties const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
operator VkPhysicalDeviceInlineUniformBlockProperties const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceInlineUniformBlockProperties *>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceInlineUniformBlockProperties &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceInlineUniformBlockProperties *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
void * const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType,
|
|
pNext,
|
|
maxInlineUniformBlockSize,
|
|
maxPerStageDescriptorInlineUniformBlocks,
|
|
maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks,
|
|
maxDescriptorSetInlineUniformBlocks,
|
|
maxDescriptorSetUpdateAfterBindInlineUniformBlocks );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( PhysicalDeviceInlineUniformBlockProperties const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceInlineUniformBlockProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxInlineUniformBlockSize == rhs.maxInlineUniformBlockSize ) &&
|
|
( maxPerStageDescriptorInlineUniformBlocks == rhs.maxPerStageDescriptorInlineUniformBlocks ) &&
|
|
( maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks == rhs.maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks ) &&
|
|
( maxDescriptorSetInlineUniformBlocks == rhs.maxDescriptorSetInlineUniformBlocks ) &&
|
|
( maxDescriptorSetUpdateAfterBindInlineUniformBlocks == rhs.maxDescriptorSetUpdateAfterBindInlineUniformBlocks );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceInlineUniformBlockProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceInlineUniformBlockProperties;
|
|
void * pNext = {};
|
|
uint32_t maxInlineUniformBlockSize = {};
|
|
uint32_t maxPerStageDescriptorInlineUniformBlocks = {};
|
|
uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks = {};
|
|
uint32_t maxDescriptorSetInlineUniformBlocks = {};
|
|
uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceInlineUniformBlockProperties>
|
|
{
|
|
using Type = PhysicalDeviceInlineUniformBlockProperties;
|
|
};
|
|
|
|
using PhysicalDeviceInlineUniformBlockPropertiesEXT = PhysicalDeviceInlineUniformBlockProperties;
|
|
|
|
struct PhysicalDeviceLimits
|
|
{
|
|
using NativeType = VkPhysicalDeviceLimits;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLimits( uint32_t maxImageDimension1D_ = {},
|
|
uint32_t maxImageDimension2D_ = {},
|
|
uint32_t maxImageDimension3D_ = {},
|
|
uint32_t maxImageDimensionCube_ = {},
|
|
uint32_t maxImageArrayLayers_ = {},
|
|
uint32_t maxTexelBufferElements_ = {},
|
|
uint32_t maxUniformBufferRange_ = {},
|
|
uint32_t maxStorageBufferRange_ = {},
|
|
uint32_t maxPushConstantsSize_ = {},
|
|
uint32_t maxMemoryAllocationCount_ = {},
|
|
uint32_t maxSamplerAllocationCount_ = {},
|
|
VULKAN_HPP_NAMESPACE::DeviceSize bufferImageGranularity_ = {},
|
|
VULKAN_HPP_NAMESPACE::DeviceSize sparseAddressSpaceSize_ = {},
|
|
uint32_t maxBoundDescriptorSets_ = {},
|
|
uint32_t maxPerStageDescriptorSamplers_ = {},
|
|
uint32_t maxPerStageDescriptorUniformBuffers_ = {},
|
|
uint32_t maxPerStageDescriptorStorageBuffers_ = {},
|
|
uint32_t maxPerStageDescriptorSampledImages_ = {},
|
|
uint32_t maxPerStageDescriptorStorageImages_ = {},
|
|
uint32_t maxPerStageDescriptorInputAttachments_ = {},
|
|
uint32_t maxPerStageResources_ = {},
|
|
uint32_t maxDescriptorSetSamplers_ = {},
|
|
uint32_t maxDescriptorSetUniformBuffers_ = {},
|
|
uint32_t maxDescriptorSetUniformBuffersDynamic_ = {},
|
|
uint32_t maxDescriptorSetStorageBuffers_ = {},
|
|
uint32_t maxDescriptorSetStorageBuffersDynamic_ = {},
|
|
uint32_t maxDescriptorSetSampledImages_ = {},
|
|
uint32_t maxDescriptorSetStorageImages_ = {},
|
|
uint32_t maxDescriptorSetInputAttachments_ = {},
|
|
uint32_t maxVertexInputAttributes_ = {},
|
|
uint32_t maxVertexInputBindings_ = {},
|
|
uint32_t maxVertexInputAttributeOffset_ = {},
|
|
uint32_t maxVertexInputBindingStride_ = {},
|
|
uint32_t maxVertexOutputComponents_ = {},
|
|
uint32_t maxTessellationGenerationLevel_ = {},
|
|
uint32_t maxTessellationPatchSize_ = {},
|
|
uint32_t maxTessellationControlPerVertexInputComponents_ = {},
|
|
uint32_t maxTessellationControlPerVertexOutputComponents_ = {},
|
|
uint32_t maxTessellationControlPerPatchOutputComponents_ = {},
|
|
uint32_t maxTessellationControlTotalOutputComponents_ = {},
|
|
uint32_t maxTessellationEvaluationInputComponents_ = {},
|
|
uint32_t maxTessellationEvaluationOutputComponents_ = {},
|
|
uint32_t maxGeometryShaderInvocations_ = {},
|
|
uint32_t maxGeometryInputComponents_ = {},
|
|
uint32_t maxGeometryOutputComponents_ = {},
|
|
uint32_t maxGeometryOutputVertices_ = {},
|
|
uint32_t maxGeometryTotalOutputComponents_ = {},
|
|
uint32_t maxFragmentInputComponents_ = {},
|
|
uint32_t maxFragmentOutputAttachments_ = {},
|
|
uint32_t maxFragmentDualSrcAttachments_ = {},
|
|
uint32_t maxFragmentCombinedOutputResources_ = {},
|
|
uint32_t maxComputeSharedMemorySize_ = {},
|
|
std::array<uint32_t, 3> const & maxComputeWorkGroupCount_ = {},
|
|
uint32_t maxComputeWorkGroupInvocations_ = {},
|
|
std::array<uint32_t, 3> const & maxComputeWorkGroupSize_ = {},
|
|
uint32_t subPixelPrecisionBits_ = {},
|
|
uint32_t subTexelPrecisionBits_ = {},
|
|
uint32_t mipmapPrecisionBits_ = {},
|
|
uint32_t maxDrawIndexedIndexValue_ = {},
|
|
uint32_t maxDrawIndirectCount_ = {},
|
|
float maxSamplerLodBias_ = {},
|
|
float maxSamplerAnisotropy_ = {},
|
|
uint32_t maxViewports_ = {},
|
|
std::array<uint32_t, 2> const & maxViewportDimensions_ = {},
|
|
std::array<float, 2> const & viewportBoundsRange_ = {},
|
|
uint32_t viewportSubPixelBits_ = {},
|
|
size_t minMemoryMapAlignment_ = {},
|
|
VULKAN_HPP_NAMESPACE::DeviceSize minTexelBufferOffsetAlignment_ = {},
|
|
VULKAN_HPP_NAMESPACE::DeviceSize minUniformBufferOffsetAlignment_ = {},
|
|
VULKAN_HPP_NAMESPACE::DeviceSize minStorageBufferOffsetAlignment_ = {},
|
|
int32_t minTexelOffset_ = {},
|
|
uint32_t maxTexelOffset_ = {},
|
|
int32_t minTexelGatherOffset_ = {},
|
|
uint32_t maxTexelGatherOffset_ = {},
|
|
float minInterpolationOffset_ = {},
|
|
float maxInterpolationOffset_ = {},
|
|
uint32_t subPixelInterpolationOffsetBits_ = {},
|
|
uint32_t maxFramebufferWidth_ = {},
|
|
uint32_t maxFramebufferHeight_ = {},
|
|
uint32_t maxFramebufferLayers_ = {},
|
|
VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferColorSampleCounts_ = {},
|
|
VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferDepthSampleCounts_ = {},
|
|
VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferStencilSampleCounts_ = {},
|
|
VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferNoAttachmentsSampleCounts_ = {},
|
|
uint32_t maxColorAttachments_ = {},
|
|
VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageColorSampleCounts_ = {},
|
|
VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageIntegerSampleCounts_ = {},
|
|
VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageDepthSampleCounts_ = {},
|
|
VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageStencilSampleCounts_ = {},
|
|
VULKAN_HPP_NAMESPACE::SampleCountFlags storageImageSampleCounts_ = {},
|
|
uint32_t maxSampleMaskWords_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 timestampComputeAndGraphics_ = {},
|
|
float timestampPeriod_ = {},
|
|
uint32_t maxClipDistances_ = {},
|
|
uint32_t maxCullDistances_ = {},
|
|
uint32_t maxCombinedClipAndCullDistances_ = {},
|
|
uint32_t discreteQueuePriorities_ = {},
|
|
std::array<float, 2> const & pointSizeRange_ = {},
|
|
std::array<float, 2> const & lineWidthRange_ = {},
|
|
float pointSizeGranularity_ = {},
|
|
float lineWidthGranularity_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 strictLines_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 standardSampleLocations_ = {},
|
|
VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyOffsetAlignment_ = {},
|
|
VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyRowPitchAlignment_ = {},
|
|
VULKAN_HPP_NAMESPACE::DeviceSize nonCoherentAtomSize_ = {} ) VULKAN_HPP_NOEXCEPT
|
|
: maxImageDimension1D( maxImageDimension1D_ )
|
|
, maxImageDimension2D( maxImageDimension2D_ )
|
|
, maxImageDimension3D( maxImageDimension3D_ )
|
|
, maxImageDimensionCube( maxImageDimensionCube_ )
|
|
, maxImageArrayLayers( maxImageArrayLayers_ )
|
|
, maxTexelBufferElements( maxTexelBufferElements_ )
|
|
, maxUniformBufferRange( maxUniformBufferRange_ )
|
|
, maxStorageBufferRange( maxStorageBufferRange_ )
|
|
, maxPushConstantsSize( maxPushConstantsSize_ )
|
|
, maxMemoryAllocationCount( maxMemoryAllocationCount_ )
|
|
, maxSamplerAllocationCount( maxSamplerAllocationCount_ )
|
|
, bufferImageGranularity( bufferImageGranularity_ )
|
|
, sparseAddressSpaceSize( sparseAddressSpaceSize_ )
|
|
, maxBoundDescriptorSets( maxBoundDescriptorSets_ )
|
|
, maxPerStageDescriptorSamplers( maxPerStageDescriptorSamplers_ )
|
|
, maxPerStageDescriptorUniformBuffers( maxPerStageDescriptorUniformBuffers_ )
|
|
, maxPerStageDescriptorStorageBuffers( maxPerStageDescriptorStorageBuffers_ )
|
|
, maxPerStageDescriptorSampledImages( maxPerStageDescriptorSampledImages_ )
|
|
, maxPerStageDescriptorStorageImages( maxPerStageDescriptorStorageImages_ )
|
|
, maxPerStageDescriptorInputAttachments( maxPerStageDescriptorInputAttachments_ )
|
|
, maxPerStageResources( maxPerStageResources_ )
|
|
, maxDescriptorSetSamplers( maxDescriptorSetSamplers_ )
|
|
, maxDescriptorSetUniformBuffers( maxDescriptorSetUniformBuffers_ )
|
|
, maxDescriptorSetUniformBuffersDynamic( maxDescriptorSetUniformBuffersDynamic_ )
|
|
, maxDescriptorSetStorageBuffers( maxDescriptorSetStorageBuffers_ )
|
|
, maxDescriptorSetStorageBuffersDynamic( maxDescriptorSetStorageBuffersDynamic_ )
|
|
, maxDescriptorSetSampledImages( maxDescriptorSetSampledImages_ )
|
|
, maxDescriptorSetStorageImages( maxDescriptorSetStorageImages_ )
|
|
, maxDescriptorSetInputAttachments( maxDescriptorSetInputAttachments_ )
|
|
, maxVertexInputAttributes( maxVertexInputAttributes_ )
|
|
, maxVertexInputBindings( maxVertexInputBindings_ )
|
|
, maxVertexInputAttributeOffset( maxVertexInputAttributeOffset_ )
|
|
, maxVertexInputBindingStride( maxVertexInputBindingStride_ )
|
|
, maxVertexOutputComponents( maxVertexOutputComponents_ )
|
|
, maxTessellationGenerationLevel( maxTessellationGenerationLevel_ )
|
|
, maxTessellationPatchSize( maxTessellationPatchSize_ )
|
|
, maxTessellationControlPerVertexInputComponents( maxTessellationControlPerVertexInputComponents_ )
|
|
, maxTessellationControlPerVertexOutputComponents( maxTessellationControlPerVertexOutputComponents_ )
|
|
, maxTessellationControlPerPatchOutputComponents( maxTessellationControlPerPatchOutputComponents_ )
|
|
, maxTessellationControlTotalOutputComponents( maxTessellationControlTotalOutputComponents_ )
|
|
, maxTessellationEvaluationInputComponents( maxTessellationEvaluationInputComponents_ )
|
|
, maxTessellationEvaluationOutputComponents( maxTessellationEvaluationOutputComponents_ )
|
|
, maxGeometryShaderInvocations( maxGeometryShaderInvocations_ )
|
|
, maxGeometryInputComponents( maxGeometryInputComponents_ )
|
|
, maxGeometryOutputComponents( maxGeometryOutputComponents_ )
|
|
, maxGeometryOutputVertices( maxGeometryOutputVertices_ )
|
|
, maxGeometryTotalOutputComponents( maxGeometryTotalOutputComponents_ )
|
|
, maxFragmentInputComponents( maxFragmentInputComponents_ )
|
|
, maxFragmentOutputAttachments( maxFragmentOutputAttachments_ )
|
|
, maxFragmentDualSrcAttachments( maxFragmentDualSrcAttachments_ )
|
|
, maxFragmentCombinedOutputResources( maxFragmentCombinedOutputResources_ )
|
|
, maxComputeSharedMemorySize( maxComputeSharedMemorySize_ )
|
|
, maxComputeWorkGroupCount( maxComputeWorkGroupCount_ )
|
|
, maxComputeWorkGroupInvocations( maxComputeWorkGroupInvocations_ )
|
|
, maxComputeWorkGroupSize( maxComputeWorkGroupSize_ )
|
|
, subPixelPrecisionBits( subPixelPrecisionBits_ )
|
|
, subTexelPrecisionBits( subTexelPrecisionBits_ )
|
|
, mipmapPrecisionBits( mipmapPrecisionBits_ )
|
|
, maxDrawIndexedIndexValue( maxDrawIndexedIndexValue_ )
|
|
, maxDrawIndirectCount( maxDrawIndirectCount_ )
|
|
, maxSamplerLodBias( maxSamplerLodBias_ )
|
|
, maxSamplerAnisotropy( maxSamplerAnisotropy_ )
|
|
, maxViewports( maxViewports_ )
|
|
, maxViewportDimensions( maxViewportDimensions_ )
|
|
, viewportBoundsRange( viewportBoundsRange_ )
|
|
, viewportSubPixelBits( viewportSubPixelBits_ )
|
|
, minMemoryMapAlignment( minMemoryMapAlignment_ )
|
|
, minTexelBufferOffsetAlignment( minTexelBufferOffsetAlignment_ )
|
|
, minUniformBufferOffsetAlignment( minUniformBufferOffsetAlignment_ )
|
|
, minStorageBufferOffsetAlignment( minStorageBufferOffsetAlignment_ )
|
|
, minTexelOffset( minTexelOffset_ )
|
|
, maxTexelOffset( maxTexelOffset_ )
|
|
, minTexelGatherOffset( minTexelGatherOffset_ )
|
|
, maxTexelGatherOffset( maxTexelGatherOffset_ )
|
|
, minInterpolationOffset( minInterpolationOffset_ )
|
|
, maxInterpolationOffset( maxInterpolationOffset_ )
|
|
, subPixelInterpolationOffsetBits( subPixelInterpolationOffsetBits_ )
|
|
, maxFramebufferWidth( maxFramebufferWidth_ )
|
|
, maxFramebufferHeight( maxFramebufferHeight_ )
|
|
, maxFramebufferLayers( maxFramebufferLayers_ )
|
|
, framebufferColorSampleCounts( framebufferColorSampleCounts_ )
|
|
, framebufferDepthSampleCounts( framebufferDepthSampleCounts_ )
|
|
, framebufferStencilSampleCounts( framebufferStencilSampleCounts_ )
|
|
, framebufferNoAttachmentsSampleCounts( framebufferNoAttachmentsSampleCounts_ )
|
|
, maxColorAttachments( maxColorAttachments_ )
|
|
, sampledImageColorSampleCounts( sampledImageColorSampleCounts_ )
|
|
, sampledImageIntegerSampleCounts( sampledImageIntegerSampleCounts_ )
|
|
, sampledImageDepthSampleCounts( sampledImageDepthSampleCounts_ )
|
|
, sampledImageStencilSampleCounts( sampledImageStencilSampleCounts_ )
|
|
, storageImageSampleCounts( storageImageSampleCounts_ )
|
|
, maxSampleMaskWords( maxSampleMaskWords_ )
|
|
, timestampComputeAndGraphics( timestampComputeAndGraphics_ )
|
|
, timestampPeriod( timestampPeriod_ )
|
|
, maxClipDistances( maxClipDistances_ )
|
|
, maxCullDistances( maxCullDistances_ )
|
|
, maxCombinedClipAndCullDistances( maxCombinedClipAndCullDistances_ )
|
|
, discreteQueuePriorities( discreteQueuePriorities_ )
|
|
, pointSizeRange( pointSizeRange_ )
|
|
, lineWidthRange( lineWidthRange_ )
|
|
, pointSizeGranularity( pointSizeGranularity_ )
|
|
, lineWidthGranularity( lineWidthGranularity_ )
|
|
, strictLines( strictLines_ )
|
|
, standardSampleLocations( standardSampleLocations_ )
|
|
, optimalBufferCopyOffsetAlignment( optimalBufferCopyOffsetAlignment_ )
|
|
, optimalBufferCopyRowPitchAlignment( optimalBufferCopyRowPitchAlignment_ )
|
|
, nonCoherentAtomSize( nonCoherentAtomSize_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLimits( PhysicalDeviceLimits const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceLimits( VkPhysicalDeviceLimits const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceLimits( *reinterpret_cast<PhysicalDeviceLimits const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
PhysicalDeviceLimits & operator=( PhysicalDeviceLimits const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceLimits & operator=( VkPhysicalDeviceLimits const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
operator VkPhysicalDeviceLimits const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceLimits *>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceLimits &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceLimits *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
VULKAN_HPP_NAMESPACE::DeviceSize const &,
|
|
VULKAN_HPP_NAMESPACE::DeviceSize const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> const &,
|
|
uint32_t const &,
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
float const &,
|
|
float const &,
|
|
uint32_t const &,
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 2> const &,
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 2> const &,
|
|
uint32_t const &,
|
|
size_t const &,
|
|
VULKAN_HPP_NAMESPACE::DeviceSize const &,
|
|
VULKAN_HPP_NAMESPACE::DeviceSize const &,
|
|
VULKAN_HPP_NAMESPACE::DeviceSize const &,
|
|
int32_t const &,
|
|
uint32_t const &,
|
|
int32_t const &,
|
|
uint32_t const &,
|
|
float const &,
|
|
float const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
VULKAN_HPP_NAMESPACE::SampleCountFlags const &,
|
|
VULKAN_HPP_NAMESPACE::SampleCountFlags const &,
|
|
VULKAN_HPP_NAMESPACE::SampleCountFlags const &,
|
|
VULKAN_HPP_NAMESPACE::SampleCountFlags const &,
|
|
uint32_t const &,
|
|
VULKAN_HPP_NAMESPACE::SampleCountFlags const &,
|
|
VULKAN_HPP_NAMESPACE::SampleCountFlags const &,
|
|
VULKAN_HPP_NAMESPACE::SampleCountFlags const &,
|
|
VULKAN_HPP_NAMESPACE::SampleCountFlags const &,
|
|
VULKAN_HPP_NAMESPACE::SampleCountFlags const &,
|
|
uint32_t const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
float const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 2> const &,
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 2> const &,
|
|
float const &,
|
|
float const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::DeviceSize const &,
|
|
VULKAN_HPP_NAMESPACE::DeviceSize const &,
|
|
VULKAN_HPP_NAMESPACE::DeviceSize const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( maxImageDimension1D,
|
|
maxImageDimension2D,
|
|
maxImageDimension3D,
|
|
maxImageDimensionCube,
|
|
maxImageArrayLayers,
|
|
maxTexelBufferElements,
|
|
maxUniformBufferRange,
|
|
maxStorageBufferRange,
|
|
maxPushConstantsSize,
|
|
maxMemoryAllocationCount,
|
|
maxSamplerAllocationCount,
|
|
bufferImageGranularity,
|
|
sparseAddressSpaceSize,
|
|
maxBoundDescriptorSets,
|
|
maxPerStageDescriptorSamplers,
|
|
maxPerStageDescriptorUniformBuffers,
|
|
maxPerStageDescriptorStorageBuffers,
|
|
maxPerStageDescriptorSampledImages,
|
|
maxPerStageDescriptorStorageImages,
|
|
maxPerStageDescriptorInputAttachments,
|
|
maxPerStageResources,
|
|
maxDescriptorSetSamplers,
|
|
maxDescriptorSetUniformBuffers,
|
|
maxDescriptorSetUniformBuffersDynamic,
|
|
maxDescriptorSetStorageBuffers,
|
|
maxDescriptorSetStorageBuffersDynamic,
|
|
maxDescriptorSetSampledImages,
|
|
maxDescriptorSetStorageImages,
|
|
maxDescriptorSetInputAttachments,
|
|
maxVertexInputAttributes,
|
|
maxVertexInputBindings,
|
|
maxVertexInputAttributeOffset,
|
|
maxVertexInputBindingStride,
|
|
maxVertexOutputComponents,
|
|
maxTessellationGenerationLevel,
|
|
maxTessellationPatchSize,
|
|
maxTessellationControlPerVertexInputComponents,
|
|
maxTessellationControlPerVertexOutputComponents,
|
|
maxTessellationControlPerPatchOutputComponents,
|
|
maxTessellationControlTotalOutputComponents,
|
|
maxTessellationEvaluationInputComponents,
|
|
maxTessellationEvaluationOutputComponents,
|
|
maxGeometryShaderInvocations,
|
|
maxGeometryInputComponents,
|
|
maxGeometryOutputComponents,
|
|
maxGeometryOutputVertices,
|
|
maxGeometryTotalOutputComponents,
|
|
maxFragmentInputComponents,
|
|
maxFragmentOutputAttachments,
|
|
maxFragmentDualSrcAttachments,
|
|
maxFragmentCombinedOutputResources,
|
|
maxComputeSharedMemorySize,
|
|
maxComputeWorkGroupCount,
|
|
maxComputeWorkGroupInvocations,
|
|
maxComputeWorkGroupSize,
|
|
subPixelPrecisionBits,
|
|
subTexelPrecisionBits,
|
|
mipmapPrecisionBits,
|
|
maxDrawIndexedIndexValue,
|
|
maxDrawIndirectCount,
|
|
maxSamplerLodBias,
|
|
maxSamplerAnisotropy,
|
|
maxViewports,
|
|
maxViewportDimensions,
|
|
viewportBoundsRange,
|
|
viewportSubPixelBits,
|
|
minMemoryMapAlignment,
|
|
minTexelBufferOffsetAlignment,
|
|
minUniformBufferOffsetAlignment,
|
|
minStorageBufferOffsetAlignment,
|
|
minTexelOffset,
|
|
maxTexelOffset,
|
|
minTexelGatherOffset,
|
|
maxTexelGatherOffset,
|
|
minInterpolationOffset,
|
|
maxInterpolationOffset,
|
|
subPixelInterpolationOffsetBits,
|
|
maxFramebufferWidth,
|
|
maxFramebufferHeight,
|
|
maxFramebufferLayers,
|
|
framebufferColorSampleCounts,
|
|
framebufferDepthSampleCounts,
|
|
framebufferStencilSampleCounts,
|
|
framebufferNoAttachmentsSampleCounts,
|
|
maxColorAttachments,
|
|
sampledImageColorSampleCounts,
|
|
sampledImageIntegerSampleCounts,
|
|
sampledImageDepthSampleCounts,
|
|
sampledImageStencilSampleCounts,
|
|
storageImageSampleCounts,
|
|
maxSampleMaskWords,
|
|
timestampComputeAndGraphics,
|
|
timestampPeriod,
|
|
maxClipDistances,
|
|
maxCullDistances,
|
|
maxCombinedClipAndCullDistances,
|
|
discreteQueuePriorities,
|
|
pointSizeRange,
|
|
lineWidthRange,
|
|
pointSizeGranularity,
|
|
lineWidthGranularity,
|
|
strictLines,
|
|
standardSampleLocations,
|
|
optimalBufferCopyOffsetAlignment,
|
|
optimalBufferCopyRowPitchAlignment,
|
|
nonCoherentAtomSize );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( PhysicalDeviceLimits const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceLimits const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( maxImageDimension1D == rhs.maxImageDimension1D ) && ( maxImageDimension2D == rhs.maxImageDimension2D ) &&
|
|
( maxImageDimension3D == rhs.maxImageDimension3D ) && ( maxImageDimensionCube == rhs.maxImageDimensionCube ) &&
|
|
( maxImageArrayLayers == rhs.maxImageArrayLayers ) && ( maxTexelBufferElements == rhs.maxTexelBufferElements ) &&
|
|
( maxUniformBufferRange == rhs.maxUniformBufferRange ) && ( maxStorageBufferRange == rhs.maxStorageBufferRange ) &&
|
|
( maxPushConstantsSize == rhs.maxPushConstantsSize ) && ( maxMemoryAllocationCount == rhs.maxMemoryAllocationCount ) &&
|
|
( maxSamplerAllocationCount == rhs.maxSamplerAllocationCount ) && ( bufferImageGranularity == rhs.bufferImageGranularity ) &&
|
|
( sparseAddressSpaceSize == rhs.sparseAddressSpaceSize ) && ( maxBoundDescriptorSets == rhs.maxBoundDescriptorSets ) &&
|
|
( maxPerStageDescriptorSamplers == rhs.maxPerStageDescriptorSamplers ) &&
|
|
( maxPerStageDescriptorUniformBuffers == rhs.maxPerStageDescriptorUniformBuffers ) &&
|
|
( maxPerStageDescriptorStorageBuffers == rhs.maxPerStageDescriptorStorageBuffers ) &&
|
|
( maxPerStageDescriptorSampledImages == rhs.maxPerStageDescriptorSampledImages ) &&
|
|
( maxPerStageDescriptorStorageImages == rhs.maxPerStageDescriptorStorageImages ) &&
|
|
( maxPerStageDescriptorInputAttachments == rhs.maxPerStageDescriptorInputAttachments ) && ( maxPerStageResources == rhs.maxPerStageResources ) &&
|
|
( maxDescriptorSetSamplers == rhs.maxDescriptorSetSamplers ) && ( maxDescriptorSetUniformBuffers == rhs.maxDescriptorSetUniformBuffers ) &&
|
|
( maxDescriptorSetUniformBuffersDynamic == rhs.maxDescriptorSetUniformBuffersDynamic ) &&
|
|
( maxDescriptorSetStorageBuffers == rhs.maxDescriptorSetStorageBuffers ) &&
|
|
( maxDescriptorSetStorageBuffersDynamic == rhs.maxDescriptorSetStorageBuffersDynamic ) &&
|
|
( maxDescriptorSetSampledImages == rhs.maxDescriptorSetSampledImages ) && ( maxDescriptorSetStorageImages == rhs.maxDescriptorSetStorageImages ) &&
|
|
( maxDescriptorSetInputAttachments == rhs.maxDescriptorSetInputAttachments ) && ( maxVertexInputAttributes == rhs.maxVertexInputAttributes ) &&
|
|
( maxVertexInputBindings == rhs.maxVertexInputBindings ) && ( maxVertexInputAttributeOffset == rhs.maxVertexInputAttributeOffset ) &&
|
|
( maxVertexInputBindingStride == rhs.maxVertexInputBindingStride ) && ( maxVertexOutputComponents == rhs.maxVertexOutputComponents ) &&
|
|
( maxTessellationGenerationLevel == rhs.maxTessellationGenerationLevel ) && ( maxTessellationPatchSize == rhs.maxTessellationPatchSize ) &&
|
|
( maxTessellationControlPerVertexInputComponents == rhs.maxTessellationControlPerVertexInputComponents ) &&
|
|
( maxTessellationControlPerVertexOutputComponents == rhs.maxTessellationControlPerVertexOutputComponents ) &&
|
|
( maxTessellationControlPerPatchOutputComponents == rhs.maxTessellationControlPerPatchOutputComponents ) &&
|
|
( maxTessellationControlTotalOutputComponents == rhs.maxTessellationControlTotalOutputComponents ) &&
|
|
( maxTessellationEvaluationInputComponents == rhs.maxTessellationEvaluationInputComponents ) &&
|
|
( maxTessellationEvaluationOutputComponents == rhs.maxTessellationEvaluationOutputComponents ) &&
|
|
( maxGeometryShaderInvocations == rhs.maxGeometryShaderInvocations ) && ( maxGeometryInputComponents == rhs.maxGeometryInputComponents ) &&
|
|
( maxGeometryOutputComponents == rhs.maxGeometryOutputComponents ) && ( maxGeometryOutputVertices == rhs.maxGeometryOutputVertices ) &&
|
|
( maxGeometryTotalOutputComponents == rhs.maxGeometryTotalOutputComponents ) && ( maxFragmentInputComponents == rhs.maxFragmentInputComponents ) &&
|
|
( maxFragmentOutputAttachments == rhs.maxFragmentOutputAttachments ) && ( maxFragmentDualSrcAttachments == rhs.maxFragmentDualSrcAttachments ) &&
|
|
( maxFragmentCombinedOutputResources == rhs.maxFragmentCombinedOutputResources ) &&
|
|
( maxComputeSharedMemorySize == rhs.maxComputeSharedMemorySize ) && ( maxComputeWorkGroupCount == rhs.maxComputeWorkGroupCount ) &&
|
|
( maxComputeWorkGroupInvocations == rhs.maxComputeWorkGroupInvocations ) && ( maxComputeWorkGroupSize == rhs.maxComputeWorkGroupSize ) &&
|
|
( subPixelPrecisionBits == rhs.subPixelPrecisionBits ) && ( subTexelPrecisionBits == rhs.subTexelPrecisionBits ) &&
|
|
( mipmapPrecisionBits == rhs.mipmapPrecisionBits ) && ( maxDrawIndexedIndexValue == rhs.maxDrawIndexedIndexValue ) &&
|
|
( maxDrawIndirectCount == rhs.maxDrawIndirectCount ) && ( maxSamplerLodBias == rhs.maxSamplerLodBias ) &&
|
|
( maxSamplerAnisotropy == rhs.maxSamplerAnisotropy ) && ( maxViewports == rhs.maxViewports ) &&
|
|
( maxViewportDimensions == rhs.maxViewportDimensions ) && ( viewportBoundsRange == rhs.viewportBoundsRange ) &&
|
|
( viewportSubPixelBits == rhs.viewportSubPixelBits ) && ( minMemoryMapAlignment == rhs.minMemoryMapAlignment ) &&
|
|
( minTexelBufferOffsetAlignment == rhs.minTexelBufferOffsetAlignment ) &&
|
|
( minUniformBufferOffsetAlignment == rhs.minUniformBufferOffsetAlignment ) &&
|
|
( minStorageBufferOffsetAlignment == rhs.minStorageBufferOffsetAlignment ) && ( minTexelOffset == rhs.minTexelOffset ) &&
|
|
( maxTexelOffset == rhs.maxTexelOffset ) && ( minTexelGatherOffset == rhs.minTexelGatherOffset ) &&
|
|
( maxTexelGatherOffset == rhs.maxTexelGatherOffset ) && ( minInterpolationOffset == rhs.minInterpolationOffset ) &&
|
|
( maxInterpolationOffset == rhs.maxInterpolationOffset ) && ( subPixelInterpolationOffsetBits == rhs.subPixelInterpolationOffsetBits ) &&
|
|
( maxFramebufferWidth == rhs.maxFramebufferWidth ) && ( maxFramebufferHeight == rhs.maxFramebufferHeight ) &&
|
|
( maxFramebufferLayers == rhs.maxFramebufferLayers ) && ( framebufferColorSampleCounts == rhs.framebufferColorSampleCounts ) &&
|
|
( framebufferDepthSampleCounts == rhs.framebufferDepthSampleCounts ) && ( framebufferStencilSampleCounts == rhs.framebufferStencilSampleCounts ) &&
|
|
( framebufferNoAttachmentsSampleCounts == rhs.framebufferNoAttachmentsSampleCounts ) && ( maxColorAttachments == rhs.maxColorAttachments ) &&
|
|
( sampledImageColorSampleCounts == rhs.sampledImageColorSampleCounts ) &&
|
|
( sampledImageIntegerSampleCounts == rhs.sampledImageIntegerSampleCounts ) &&
|
|
( sampledImageDepthSampleCounts == rhs.sampledImageDepthSampleCounts ) &&
|
|
( sampledImageStencilSampleCounts == rhs.sampledImageStencilSampleCounts ) && ( storageImageSampleCounts == rhs.storageImageSampleCounts ) &&
|
|
( maxSampleMaskWords == rhs.maxSampleMaskWords ) && ( timestampComputeAndGraphics == rhs.timestampComputeAndGraphics ) &&
|
|
( timestampPeriod == rhs.timestampPeriod ) && ( maxClipDistances == rhs.maxClipDistances ) && ( maxCullDistances == rhs.maxCullDistances ) &&
|
|
( maxCombinedClipAndCullDistances == rhs.maxCombinedClipAndCullDistances ) && ( discreteQueuePriorities == rhs.discreteQueuePriorities ) &&
|
|
( pointSizeRange == rhs.pointSizeRange ) && ( lineWidthRange == rhs.lineWidthRange ) && ( pointSizeGranularity == rhs.pointSizeGranularity ) &&
|
|
( lineWidthGranularity == rhs.lineWidthGranularity ) && ( strictLines == rhs.strictLines ) &&
|
|
( standardSampleLocations == rhs.standardSampleLocations ) && ( optimalBufferCopyOffsetAlignment == rhs.optimalBufferCopyOffsetAlignment ) &&
|
|
( optimalBufferCopyRowPitchAlignment == rhs.optimalBufferCopyRowPitchAlignment ) && ( nonCoherentAtomSize == rhs.nonCoherentAtomSize );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceLimits const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
uint32_t maxImageDimension1D = {};
|
|
uint32_t maxImageDimension2D = {};
|
|
uint32_t maxImageDimension3D = {};
|
|
uint32_t maxImageDimensionCube = {};
|
|
uint32_t maxImageArrayLayers = {};
|
|
uint32_t maxTexelBufferElements = {};
|
|
uint32_t maxUniformBufferRange = {};
|
|
uint32_t maxStorageBufferRange = {};
|
|
uint32_t maxPushConstantsSize = {};
|
|
uint32_t maxMemoryAllocationCount = {};
|
|
uint32_t maxSamplerAllocationCount = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize bufferImageGranularity = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize sparseAddressSpaceSize = {};
|
|
uint32_t maxBoundDescriptorSets = {};
|
|
uint32_t maxPerStageDescriptorSamplers = {};
|
|
uint32_t maxPerStageDescriptorUniformBuffers = {};
|
|
uint32_t maxPerStageDescriptorStorageBuffers = {};
|
|
uint32_t maxPerStageDescriptorSampledImages = {};
|
|
uint32_t maxPerStageDescriptorStorageImages = {};
|
|
uint32_t maxPerStageDescriptorInputAttachments = {};
|
|
uint32_t maxPerStageResources = {};
|
|
uint32_t maxDescriptorSetSamplers = {};
|
|
uint32_t maxDescriptorSetUniformBuffers = {};
|
|
uint32_t maxDescriptorSetUniformBuffersDynamic = {};
|
|
uint32_t maxDescriptorSetStorageBuffers = {};
|
|
uint32_t maxDescriptorSetStorageBuffersDynamic = {};
|
|
uint32_t maxDescriptorSetSampledImages = {};
|
|
uint32_t maxDescriptorSetStorageImages = {};
|
|
uint32_t maxDescriptorSetInputAttachments = {};
|
|
uint32_t maxVertexInputAttributes = {};
|
|
uint32_t maxVertexInputBindings = {};
|
|
uint32_t maxVertexInputAttributeOffset = {};
|
|
uint32_t maxVertexInputBindingStride = {};
|
|
uint32_t maxVertexOutputComponents = {};
|
|
uint32_t maxTessellationGenerationLevel = {};
|
|
uint32_t maxTessellationPatchSize = {};
|
|
uint32_t maxTessellationControlPerVertexInputComponents = {};
|
|
uint32_t maxTessellationControlPerVertexOutputComponents = {};
|
|
uint32_t maxTessellationControlPerPatchOutputComponents = {};
|
|
uint32_t maxTessellationControlTotalOutputComponents = {};
|
|
uint32_t maxTessellationEvaluationInputComponents = {};
|
|
uint32_t maxTessellationEvaluationOutputComponents = {};
|
|
uint32_t maxGeometryShaderInvocations = {};
|
|
uint32_t maxGeometryInputComponents = {};
|
|
uint32_t maxGeometryOutputComponents = {};
|
|
uint32_t maxGeometryOutputVertices = {};
|
|
uint32_t maxGeometryTotalOutputComponents = {};
|
|
uint32_t maxFragmentInputComponents = {};
|
|
uint32_t maxFragmentOutputAttachments = {};
|
|
uint32_t maxFragmentDualSrcAttachments = {};
|
|
uint32_t maxFragmentCombinedOutputResources = {};
|
|
uint32_t maxComputeSharedMemorySize = {};
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> maxComputeWorkGroupCount = {};
|
|
uint32_t maxComputeWorkGroupInvocations = {};
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> maxComputeWorkGroupSize = {};
|
|
uint32_t subPixelPrecisionBits = {};
|
|
uint32_t subTexelPrecisionBits = {};
|
|
uint32_t mipmapPrecisionBits = {};
|
|
uint32_t maxDrawIndexedIndexValue = {};
|
|
uint32_t maxDrawIndirectCount = {};
|
|
float maxSamplerLodBias = {};
|
|
float maxSamplerAnisotropy = {};
|
|
uint32_t maxViewports = {};
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 2> maxViewportDimensions = {};
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 2> viewportBoundsRange = {};
|
|
uint32_t viewportSubPixelBits = {};
|
|
size_t minMemoryMapAlignment = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize minTexelBufferOffsetAlignment = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize minUniformBufferOffsetAlignment = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize minStorageBufferOffsetAlignment = {};
|
|
int32_t minTexelOffset = {};
|
|
uint32_t maxTexelOffset = {};
|
|
int32_t minTexelGatherOffset = {};
|
|
uint32_t maxTexelGatherOffset = {};
|
|
float minInterpolationOffset = {};
|
|
float maxInterpolationOffset = {};
|
|
uint32_t subPixelInterpolationOffsetBits = {};
|
|
uint32_t maxFramebufferWidth = {};
|
|
uint32_t maxFramebufferHeight = {};
|
|
uint32_t maxFramebufferLayers = {};
|
|
VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferColorSampleCounts = {};
|
|
VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferDepthSampleCounts = {};
|
|
VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferStencilSampleCounts = {};
|
|
VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferNoAttachmentsSampleCounts = {};
|
|
uint32_t maxColorAttachments = {};
|
|
VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageColorSampleCounts = {};
|
|
VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageIntegerSampleCounts = {};
|
|
VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageDepthSampleCounts = {};
|
|
VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageStencilSampleCounts = {};
|
|
VULKAN_HPP_NAMESPACE::SampleCountFlags storageImageSampleCounts = {};
|
|
uint32_t maxSampleMaskWords = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 timestampComputeAndGraphics = {};
|
|
float timestampPeriod = {};
|
|
uint32_t maxClipDistances = {};
|
|
uint32_t maxCullDistances = {};
|
|
uint32_t maxCombinedClipAndCullDistances = {};
|
|
uint32_t discreteQueuePriorities = {};
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 2> pointSizeRange = {};
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 2> lineWidthRange = {};
|
|
float pointSizeGranularity = {};
|
|
float lineWidthGranularity = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 strictLines = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 standardSampleLocations = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyOffsetAlignment = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyRowPitchAlignment = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize nonCoherentAtomSize = {};
|
|
};
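
  // Usage sketch: PhysicalDeviceLimits is read through PhysicalDeviceProperties. A minimal,
  // illustrative example, assuming a valid vk::PhysicalDevice named `physicalDevice`:
  //
  //   vk::PhysicalDeviceProperties properties = physicalDevice.getProperties();
  //   vk::PhysicalDeviceLimits const & limits = properties.limits;
  //   vk::DeviceSize uboAlignment = limits.minUniformBufferOffsetAlignment;
  //   // dynamic uniform-buffer offsets must be a multiple of uboAlignment
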
struct PhysicalDeviceMaintenance3Properties
|
|
{
|
|
using NativeType = VkPhysicalDeviceMaintenance3Properties;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMaintenance3Properties;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance3Properties( uint32_t maxPerSetDescriptors_ = {},
|
|
VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize_ = {},
|
|
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, maxPerSetDescriptors( maxPerSetDescriptors_ )
|
|
, maxMemoryAllocationSize( maxMemoryAllocationSize_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance3Properties( PhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceMaintenance3Properties( VkPhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceMaintenance3Properties( *reinterpret_cast<PhysicalDeviceMaintenance3Properties const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
PhysicalDeviceMaintenance3Properties & operator=( PhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceMaintenance3Properties & operator=( VkPhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance3Properties const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
operator VkPhysicalDeviceMaintenance3Properties const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceMaintenance3Properties *>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceMaintenance3Properties &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceMaintenance3Properties *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, maxPerSetDescriptors, maxMemoryAllocationSize );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( PhysicalDeviceMaintenance3Properties const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceMaintenance3Properties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxPerSetDescriptors == rhs.maxPerSetDescriptors ) &&
|
|
( maxMemoryAllocationSize == rhs.maxMemoryAllocationSize );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceMaintenance3Properties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMaintenance3Properties;
    void * pNext = {};
    uint32_t maxPerSetDescriptors = {};
    VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize = {};
  };

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceMaintenance3Properties>
  {
    using Type = PhysicalDeviceMaintenance3Properties;
  };

  using PhysicalDeviceMaintenance3PropertiesKHR = PhysicalDeviceMaintenance3Properties;
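
  // Usage sketch: these properties are obtained by chaining the struct behind
  // PhysicalDeviceProperties2, e.g. with a StructureChain (illustrative, assumes Vulkan 1.1
  // and a valid vk::PhysicalDevice `physicalDevice`):
  //
  //   auto chain = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2,
  //                                              vk::PhysicalDeviceMaintenance3Properties>();
  //   vk::DeviceSize maxAlloc = chain.get<vk::PhysicalDeviceMaintenance3Properties>().maxMemoryAllocationSize;
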
struct PhysicalDeviceMaintenance4Features
|
|
{
|
|
using NativeType = VkPhysicalDeviceMaintenance4Features;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMaintenance4Features;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance4Features( VULKAN_HPP_NAMESPACE::Bool32 maintenance4_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, maintenance4( maintenance4_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance4Features( PhysicalDeviceMaintenance4Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceMaintenance4Features( VkPhysicalDeviceMaintenance4Features const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceMaintenance4Features( *reinterpret_cast<PhysicalDeviceMaintenance4Features const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
PhysicalDeviceMaintenance4Features & operator=( PhysicalDeviceMaintenance4Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceMaintenance4Features & operator=( VkPhysicalDeviceMaintenance4Features const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance4Features const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance4Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance4Features & setMaintenance4( VULKAN_HPP_NAMESPACE::Bool32 maintenance4_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
maintenance4 = maintenance4_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkPhysicalDeviceMaintenance4Features const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceMaintenance4Features *>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceMaintenance4Features &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceMaintenance4Features *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, maintenance4 );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( PhysicalDeviceMaintenance4Features const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceMaintenance4Features const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maintenance4 == rhs.maintenance4 );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceMaintenance4Features const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMaintenance4Features;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 maintenance4 = {};
  };

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceMaintenance4Features>
  {
    using Type = PhysicalDeviceMaintenance4Features;
  };

  using PhysicalDeviceMaintenance4FeaturesKHR = PhysicalDeviceMaintenance4Features;
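
  // Usage sketch: feature structs like this one are chained into vk::DeviceCreateInfo::pNext
  // to enable the feature at device creation (illustrative; assumes the feature was reported
  // as supported and that queue create infos etc. are filled in elsewhere):
  //
  //   vk::PhysicalDeviceMaintenance4Features maintenance4Features;
  //   maintenance4Features.maintenance4 = VK_TRUE;
  //   vk::DeviceCreateInfo deviceCreateInfo;
  //   deviceCreateInfo.pNext = &maintenance4Features;
  //   vk::Device device = physicalDevice.createDevice( deviceCreateInfo );
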
struct PhysicalDeviceMaintenance4Properties
|
|
{
|
|
using NativeType = VkPhysicalDeviceMaintenance4Properties;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMaintenance4Properties;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance4Properties( VULKAN_HPP_NAMESPACE::DeviceSize maxBufferSize_ = {},
|
|
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, maxBufferSize( maxBufferSize_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance4Properties( PhysicalDeviceMaintenance4Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceMaintenance4Properties( VkPhysicalDeviceMaintenance4Properties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceMaintenance4Properties( *reinterpret_cast<PhysicalDeviceMaintenance4Properties const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
PhysicalDeviceMaintenance4Properties & operator=( PhysicalDeviceMaintenance4Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceMaintenance4Properties & operator=( VkPhysicalDeviceMaintenance4Properties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance4Properties const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
operator VkPhysicalDeviceMaintenance4Properties const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceMaintenance4Properties *>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceMaintenance4Properties &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceMaintenance4Properties *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, maxBufferSize );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( PhysicalDeviceMaintenance4Properties const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceMaintenance4Properties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxBufferSize == rhs.maxBufferSize );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceMaintenance4Properties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMaintenance4Properties;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::DeviceSize maxBufferSize = {};
  };

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceMaintenance4Properties>
  {
    using Type = PhysicalDeviceMaintenance4Properties;
  };

  using PhysicalDeviceMaintenance4PropertiesKHR = PhysicalDeviceMaintenance4Properties;
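
  // Usage sketch: maxBufferSize can be used to validate buffer sizes up front (illustrative;
  // `maintenance4Properties` and `requestedSize` are assumed to exist, with the properties
  // queried through a PhysicalDeviceProperties2 chain):
  //
  //   if ( requestedSize > maintenance4Properties.maxBufferSize )
  //   {
  //     // split the allocation or report the error instead of relying on driver behaviour
  //   }
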
struct PhysicalDeviceMemoryBudgetPropertiesEXT
|
|
{
|
|
using NativeType = VkPhysicalDeviceMemoryBudgetPropertiesEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMemoryBudgetPropertiesEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryBudgetPropertiesEXT( std::array<VULKAN_HPP_NAMESPACE::DeviceSize, VK_MAX_MEMORY_HEAPS> const & heapBudget_ = {},
|
|
std::array<VULKAN_HPP_NAMESPACE::DeviceSize, VK_MAX_MEMORY_HEAPS> const & heapUsage_ = {},
|
|
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, heapBudget( heapBudget_ )
|
|
, heapUsage( heapUsage_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryBudgetPropertiesEXT( PhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceMemoryBudgetPropertiesEXT( VkPhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceMemoryBudgetPropertiesEXT( *reinterpret_cast<PhysicalDeviceMemoryBudgetPropertiesEXT const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
PhysicalDeviceMemoryBudgetPropertiesEXT & operator=( PhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceMemoryBudgetPropertiesEXT & operator=( VkPhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryBudgetPropertiesEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
operator VkPhysicalDeviceMemoryBudgetPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceMemoryBudgetPropertiesEXT *>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceMemoryBudgetPropertiesEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceMemoryBudgetPropertiesEXT *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
void * const &,
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::DeviceSize, VK_MAX_MEMORY_HEAPS> const &,
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::DeviceSize, VK_MAX_MEMORY_HEAPS> const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, heapBudget, heapUsage );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( PhysicalDeviceMemoryBudgetPropertiesEXT const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( heapBudget == rhs.heapBudget ) && ( heapUsage == rhs.heapUsage );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMemoryBudgetPropertiesEXT;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::DeviceSize, VK_MAX_MEMORY_HEAPS> heapBudget = {};
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::DeviceSize, VK_MAX_MEMORY_HEAPS> heapUsage = {};
  };

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceMemoryBudgetPropertiesEXT>
  {
    using Type = PhysicalDeviceMemoryBudgetPropertiesEXT;
  };
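
  // Usage sketch: with VK_EXT_memory_budget enabled, per-heap budget and usage are queried by
  // chaining this struct behind PhysicalDeviceMemoryProperties2 (illustrative, assumes a valid
  // vk::PhysicalDevice `physicalDevice`):
  //
  //   auto chain = physicalDevice.getMemoryProperties2<vk::PhysicalDeviceMemoryProperties2,
  //                                                    vk::PhysicalDeviceMemoryBudgetPropertiesEXT>();
  //   auto const & budget = chain.get<vk::PhysicalDeviceMemoryBudgetPropertiesEXT>();
  //   // budget.heapBudget[i] and budget.heapUsage[i] are valid for each advertised memory heap
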
struct PhysicalDeviceMemoryProperties
|
|
{
|
|
using NativeType = VkPhysicalDeviceMemoryProperties;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR_14
|
|
PhysicalDeviceMemoryProperties( uint32_t memoryTypeCount_ = {},
|
|
std::array<VULKAN_HPP_NAMESPACE::MemoryType, VK_MAX_MEMORY_TYPES> const & memoryTypes_ = {},
|
|
uint32_t memoryHeapCount_ = {},
|
|
std::array<VULKAN_HPP_NAMESPACE::MemoryHeap, VK_MAX_MEMORY_HEAPS> const & memoryHeaps_ = {} ) VULKAN_HPP_NOEXCEPT
|
|
: memoryTypeCount( memoryTypeCount_ )
|
|
, memoryTypes( memoryTypes_ )
|
|
, memoryHeapCount( memoryHeapCount_ )
|
|
, memoryHeaps( memoryHeaps_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryProperties( PhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceMemoryProperties( VkPhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceMemoryProperties( *reinterpret_cast<PhysicalDeviceMemoryProperties const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
PhysicalDeviceMemoryProperties( VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::MemoryType> const & memoryTypes_,
|
|
VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::MemoryHeap> const & memoryHeaps_ = {} )
|
|
: memoryTypeCount( std::min( static_cast<uint32_t>( memoryTypes_.size() ), VK_MAX_MEMORY_TYPES ) )
|
|
, memoryHeapCount( std::min( static_cast<uint32_t>( memoryHeaps_.size() ), VK_MAX_MEMORY_HEAPS ) )
|
|
{
|
|
VULKAN_HPP_ASSERT( memoryTypes_.size() < VK_MAX_MEMORY_TYPES );
|
|
memcpy( memoryTypes, memoryTypes_.data(), memoryTypeCount * sizeof( VULKAN_HPP_NAMESPACE::MemoryType ) );
|
|
VULKAN_HPP_ASSERT( memoryHeaps_.size() < VK_MAX_MEMORY_HEAPS );
|
|
memcpy( memoryHeaps, memoryHeaps_.data(), memoryHeapCount * sizeof( VULKAN_HPP_NAMESPACE::MemoryHeap ) );
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
PhysicalDeviceMemoryProperties & operator=( PhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceMemoryProperties & operator=( VkPhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
operator VkPhysicalDeviceMemoryProperties const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceMemoryProperties *>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceMemoryProperties &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceMemoryProperties *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<uint32_t const &,
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::MemoryType, VK_MAX_MEMORY_TYPES> const &,
|
|
uint32_t const &,
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::MemoryHeap, VK_MAX_MEMORY_HEAPS> const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( memoryTypeCount, memoryTypes, memoryHeapCount, memoryHeaps );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
std::strong_ordering operator<=>( PhysicalDeviceMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
if ( auto cmp = memoryTypeCount <=> rhs.memoryTypeCount; cmp != 0 )
|
|
return cmp;
|
|
for ( size_t i = 0; i < memoryTypeCount; ++i )
|
|
{
|
|
if ( auto cmp = memoryTypes[i] <=> rhs.memoryTypes[i]; cmp != 0 )
|
|
return cmp;
|
|
}
|
|
if ( auto cmp = memoryHeapCount <=> rhs.memoryHeapCount; cmp != 0 )
|
|
return cmp;
|
|
for ( size_t i = 0; i < memoryHeapCount; ++i )
|
|
{
|
|
if ( auto cmp = memoryHeaps[i] <=> rhs.memoryHeaps[i]; cmp != 0 )
|
|
return cmp;
|
|
}
|
|
|
|
return std::strong_ordering::equivalent;
|
|
}
|
|
#endif
|
|
|
|
bool operator==( PhysicalDeviceMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return ( memoryTypeCount == rhs.memoryTypeCount ) &&
|
|
( memcmp( memoryTypes, rhs.memoryTypes, memoryTypeCount * sizeof( VULKAN_HPP_NAMESPACE::MemoryType ) ) == 0 ) &&
|
|
( memoryHeapCount == rhs.memoryHeapCount ) &&
|
|
( memcmp( memoryHeaps, rhs.memoryHeaps, memoryHeapCount * sizeof( VULKAN_HPP_NAMESPACE::MemoryHeap ) ) == 0 );
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
|
|
  public:
    uint32_t memoryTypeCount = {};
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::MemoryType, VK_MAX_MEMORY_TYPES> memoryTypes = {};
    uint32_t memoryHeapCount = {};
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::MemoryHeap, VK_MAX_MEMORY_HEAPS> memoryHeaps = {};
  };
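
  // Usage sketch: a common pattern is scanning memoryTypes for an index that satisfies both a
  // resource's memory requirements bitmask and the desired property flags (illustrative helper):
  //
  //   uint32_t findMemoryType( vk::PhysicalDeviceMemoryProperties const & memoryProperties,
  //                            uint32_t typeBits,
  //                            vk::MemoryPropertyFlags requiredFlags )
  //   {
  //     for ( uint32_t i = 0; i < memoryProperties.memoryTypeCount; ++i )
  //     {
  //       if ( ( typeBits & ( 1u << i ) ) &&
  //            ( ( memoryProperties.memoryTypes[i].propertyFlags & requiredFlags ) == requiredFlags ) )
  //       {
  //         return i;
  //       }
  //     }
  //     throw std::runtime_error( "no suitable memory type" );
  //   }
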
struct PhysicalDeviceMemoryProperties2
|
|
{
|
|
using NativeType = VkPhysicalDeviceMemoryProperties2;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMemoryProperties2;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties memoryProperties_ = {},
|
|
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, memoryProperties( memoryProperties_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryProperties2( PhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceMemoryProperties2( VkPhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceMemoryProperties2( *reinterpret_cast<PhysicalDeviceMemoryProperties2 const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
PhysicalDeviceMemoryProperties2 & operator=( PhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceMemoryProperties2 & operator=( VkPhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
operator VkPhysicalDeviceMemoryProperties2 const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceMemoryProperties2 *>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceMemoryProperties2 &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, memoryProperties );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( PhysicalDeviceMemoryProperties2 const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceMemoryProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryProperties == rhs.memoryProperties );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceMemoryProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMemoryProperties2;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties memoryProperties = {};
  };

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceMemoryProperties2>
  {
    using Type = PhysicalDeviceMemoryProperties2;
  };

  using PhysicalDeviceMemoryProperties2KHR = PhysicalDeviceMemoryProperties2;
struct PhysicalDeviceMultiviewFeatures
|
|
{
|
|
using NativeType = VkPhysicalDeviceMultiviewFeatures;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMultiviewFeatures;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewFeatures( VULKAN_HPP_NAMESPACE::Bool32 multiview_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_ = {},
|
|
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, multiview( multiview_ )
|
|
, multiviewGeometryShader( multiviewGeometryShader_ )
|
|
, multiviewTessellationShader( multiviewTessellationShader_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewFeatures( PhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceMultiviewFeatures( VkPhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceMultiviewFeatures( *reinterpret_cast<PhysicalDeviceMultiviewFeatures const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
PhysicalDeviceMultiviewFeatures & operator=( PhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceMultiviewFeatures & operator=( VkPhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewFeatures const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewFeatures & setMultiview( VULKAN_HPP_NAMESPACE::Bool32 multiview_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
multiview = multiview_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewFeatures &
|
|
setMultiviewGeometryShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
multiviewGeometryShader = multiviewGeometryShader_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewFeatures &
|
|
setMultiviewTessellationShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
multiviewTessellationShader = multiviewTessellationShader_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkPhysicalDeviceMultiviewFeatures const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceMultiviewFeatures *>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceMultiviewFeatures &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceMultiviewFeatures *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
void * const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, multiview, multiviewGeometryShader, multiviewTessellationShader );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( PhysicalDeviceMultiviewFeatures const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceMultiviewFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( multiview == rhs.multiview ) && ( multiviewGeometryShader == rhs.multiviewGeometryShader ) &&
|
|
( multiviewTessellationShader == rhs.multiviewTessellationShader );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceMultiviewFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultiviewFeatures;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 multiview = {};
    VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader = {};
    VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader = {};
  };

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceMultiviewFeatures>
  {
    using Type = PhysicalDeviceMultiviewFeatures;
  };

  using PhysicalDeviceMultiviewFeaturesKHR = PhysicalDeviceMultiviewFeatures;
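
  // Usage sketch: to enable multiview, chain this struct (with the desired members set) behind
  // vk::PhysicalDeviceFeatures2 at device creation; pEnabledFeatures must then remain nullptr
  // (illustrative; queue create infos etc. omitted):
  //
  //   vk::PhysicalDeviceMultiviewFeatures multiviewFeatures;
  //   multiviewFeatures.multiview = VK_TRUE;
  //   vk::PhysicalDeviceFeatures2 features2;
  //   features2.pNext = &multiviewFeatures;
  //   vk::DeviceCreateInfo deviceCreateInfo;
  //   deviceCreateInfo.pNext = &features2;
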
struct PhysicalDeviceMultiviewProperties
|
|
{
|
|
using NativeType = VkPhysicalDeviceMultiviewProperties;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMultiviewProperties;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewProperties( uint32_t maxMultiviewViewCount_ = {},
|
|
uint32_t maxMultiviewInstanceIndex_ = {},
|
|
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, maxMultiviewViewCount( maxMultiviewViewCount_ )
|
|
, maxMultiviewInstanceIndex( maxMultiviewInstanceIndex_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewProperties( PhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceMultiviewProperties( VkPhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceMultiviewProperties( *reinterpret_cast<PhysicalDeviceMultiviewProperties const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
PhysicalDeviceMultiviewProperties & operator=( PhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceMultiviewProperties & operator=( VkPhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewProperties const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
operator VkPhysicalDeviceMultiviewProperties const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceMultiviewProperties *>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceMultiviewProperties &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceMultiviewProperties *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, uint32_t const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, maxMultiviewViewCount, maxMultiviewInstanceIndex );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( PhysicalDeviceMultiviewProperties const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceMultiviewProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxMultiviewViewCount == rhs.maxMultiviewViewCount ) &&
|
|
( maxMultiviewInstanceIndex == rhs.maxMultiviewInstanceIndex );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceMultiviewProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultiviewProperties;
    void * pNext = {};
    uint32_t maxMultiviewViewCount = {};
    uint32_t maxMultiviewInstanceIndex = {};
  };

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceMultiviewProperties>
  {
    using Type = PhysicalDeviceMultiviewProperties;
  };

  using PhysicalDeviceMultiviewPropertiesKHR = PhysicalDeviceMultiviewProperties;
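
  // Usage sketch: the reported limits bound the view mask of a multiview render pass
  // (illustrative; `multiviewProperties` and `desiredViewCount` are assumed to exist):
  //
  //   uint32_t viewCount = std::min( desiredViewCount, multiviewProperties.maxMultiviewViewCount );
  //   uint32_t viewMask  = ( 1u << viewCount ) - 1u;  // one bit per view: views 0 .. viewCount-1
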
struct PhysicalDevicePCIBusInfoPropertiesEXT
|
|
{
|
|
using NativeType = VkPhysicalDevicePCIBusInfoPropertiesEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePciBusInfoPropertiesEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDevicePCIBusInfoPropertiesEXT(
|
|
uint32_t pciDomain_ = {}, uint32_t pciBus_ = {}, uint32_t pciDevice_ = {}, uint32_t pciFunction_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, pciDomain( pciDomain_ )
|
|
, pciBus( pciBus_ )
|
|
, pciDevice( pciDevice_ )
|
|
, pciFunction( pciFunction_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDevicePCIBusInfoPropertiesEXT( PhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDevicePCIBusInfoPropertiesEXT( VkPhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDevicePCIBusInfoPropertiesEXT( *reinterpret_cast<PhysicalDevicePCIBusInfoPropertiesEXT const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
PhysicalDevicePCIBusInfoPropertiesEXT & operator=( PhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDevicePCIBusInfoPropertiesEXT & operator=( VkPhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePCIBusInfoPropertiesEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
operator VkPhysicalDevicePCIBusInfoPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDevicePCIBusInfoPropertiesEXT *>( this );
|
|
}
|
|
|
|
operator VkPhysicalDevicePCIBusInfoPropertiesEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDevicePCIBusInfoPropertiesEXT *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, pciDomain, pciBus, pciDevice, pciFunction );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( PhysicalDevicePCIBusInfoPropertiesEXT const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pciDomain == rhs.pciDomain ) && ( pciBus == rhs.pciBus ) && ( pciDevice == rhs.pciDevice ) &&
|
|
( pciFunction == rhs.pciFunction );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePciBusInfoPropertiesEXT;
    void * pNext = {};
    uint32_t pciDomain = {};
    uint32_t pciBus = {};
    uint32_t pciDevice = {};
    uint32_t pciFunction = {};
  };

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDevicePciBusInfoPropertiesEXT>
  {
    using Type = PhysicalDevicePCIBusInfoPropertiesEXT;
  };
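
  // Usage sketch: with VK_EXT_pci_bus_info enabled, the PCI address can be chained into a
  // properties query and printed in the usual domain:bus:device.function form (illustrative,
  // assumes a valid vk::PhysicalDevice `physicalDevice`):
  //
  //   auto chain = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2,
  //                                              vk::PhysicalDevicePCIBusInfoPropertiesEXT>();
  //   auto const & pci = chain.get<vk::PhysicalDevicePCIBusInfoPropertiesEXT>();
  //   std::printf( "%04x:%02x:%02x.%x\n", pci.pciDomain, pci.pciBus, pci.pciDevice, pci.pciFunction );
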
  struct PhysicalDevicePerformanceQueryFeaturesKHR
  {
    using NativeType = VkPhysicalDevicePerformanceQueryFeaturesKHR;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePerformanceQueryFeaturesKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceQueryFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 performanceCounterQueryPools_ = {},
                                                                    VULKAN_HPP_NAMESPACE::Bool32 performanceCounterMultipleQueryPools_ = {},
                                                                    void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , performanceCounterQueryPools( performanceCounterQueryPools_ )
      , performanceCounterMultipleQueryPools( performanceCounterMultipleQueryPools_ )
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceQueryFeaturesKHR( PhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDevicePerformanceQueryFeaturesKHR( VkPhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDevicePerformanceQueryFeaturesKHR( *reinterpret_cast<PhysicalDevicePerformanceQueryFeaturesKHR const *>( &rhs ) )
    {
    }

    PhysicalDevicePerformanceQueryFeaturesKHR & operator=( PhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDevicePerformanceQueryFeaturesKHR & operator=( VkPhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryFeaturesKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePerformanceQueryFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePerformanceQueryFeaturesKHR &
      setPerformanceCounterQueryPools( VULKAN_HPP_NAMESPACE::Bool32 performanceCounterQueryPools_ ) VULKAN_HPP_NOEXCEPT
    {
      performanceCounterQueryPools = performanceCounterQueryPools_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePerformanceQueryFeaturesKHR &
      setPerformanceCounterMultipleQueryPools( VULKAN_HPP_NAMESPACE::Bool32 performanceCounterMultipleQueryPools_ ) VULKAN_HPP_NOEXCEPT
    {
      performanceCounterMultipleQueryPools = performanceCounterMultipleQueryPools_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkPhysicalDevicePerformanceQueryFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDevicePerformanceQueryFeaturesKHR *>( this );
    }

    operator VkPhysicalDevicePerformanceQueryFeaturesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDevicePerformanceQueryFeaturesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#  if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#  else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#  endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, performanceCounterQueryPools, performanceCounterMultipleQueryPools );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDevicePerformanceQueryFeaturesKHR const & ) const = default;
#else
    bool operator==( PhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( performanceCounterQueryPools == rhs.performanceCounterQueryPools ) &&
             ( performanceCounterMultipleQueryPools == rhs.performanceCounterMultipleQueryPools );
#  endif
    }

    bool operator!=( PhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePerformanceQueryFeaturesKHR;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 performanceCounterQueryPools = {};
    VULKAN_HPP_NAMESPACE::Bool32 performanceCounterMultipleQueryPools = {};
  };

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDevicePerformanceQueryFeaturesKHR>
  {
    using Type = PhysicalDevicePerformanceQueryFeaturesKHR;
  };
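
  // Illustrative usage sketch (added commentary, not part of the generated registry output):
  // checking for VK_KHR_performance_query feature support by chaining this struct into a
  // getFeatures2 query. Assumes a valid vk::PhysicalDevice named `physicalDevice` whose
  // instance targets Vulkan 1.1+ and exposes the extension.
  //
  //   auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
  //                                            vk::PhysicalDevicePerformanceQueryFeaturesKHR>();
  //   vk::PhysicalDevicePerformanceQueryFeaturesKHR const & perfQuery =
  //     chain.get<vk::PhysicalDevicePerformanceQueryFeaturesKHR>();
  //   bool supportsPools         = perfQuery.performanceCounterQueryPools;
  //   bool supportsMultiplePools = perfQuery.performanceCounterMultipleQueryPools;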

  struct PhysicalDevicePerformanceQueryPropertiesKHR
  {
    using NativeType = VkPhysicalDevicePerformanceQueryPropertiesKHR;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePerformanceQueryPropertiesKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceQueryPropertiesKHR( VULKAN_HPP_NAMESPACE::Bool32 allowCommandBufferQueryCopies_ = {},
                                                                      void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , allowCommandBufferQueryCopies( allowCommandBufferQueryCopies_ )
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceQueryPropertiesKHR( PhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDevicePerformanceQueryPropertiesKHR( VkPhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDevicePerformanceQueryPropertiesKHR( *reinterpret_cast<PhysicalDevicePerformanceQueryPropertiesKHR const *>( &rhs ) )
    {
    }

    PhysicalDevicePerformanceQueryPropertiesKHR & operator=( PhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDevicePerformanceQueryPropertiesKHR & operator=( VkPhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryPropertiesKHR const *>( &rhs );
      return *this;
    }

    operator VkPhysicalDevicePerformanceQueryPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDevicePerformanceQueryPropertiesKHR *>( this );
    }

    operator VkPhysicalDevicePerformanceQueryPropertiesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDevicePerformanceQueryPropertiesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#  if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#  else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#  endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, allowCommandBufferQueryCopies );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDevicePerformanceQueryPropertiesKHR const & ) const = default;
#else
    bool operator==( PhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( allowCommandBufferQueryCopies == rhs.allowCommandBufferQueryCopies );
#  endif
    }

    bool operator!=( PhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePerformanceQueryPropertiesKHR;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 allowCommandBufferQueryCopies = {};
  };

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDevicePerformanceQueryPropertiesKHR>
  {
    using Type = PhysicalDevicePerformanceQueryPropertiesKHR;
  };

  struct PhysicalDevicePipelineCreationCacheControlFeatures
  {
    using NativeType = VkPhysicalDevicePipelineCreationCacheControlFeatures;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePipelineCreationCacheControlFeatures;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineCreationCacheControlFeatures( VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl_ = {},
                                                                             void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , pipelineCreationCacheControl( pipelineCreationCacheControl_ )
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDevicePipelineCreationCacheControlFeatures( PhysicalDevicePipelineCreationCacheControlFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDevicePipelineCreationCacheControlFeatures( VkPhysicalDevicePipelineCreationCacheControlFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDevicePipelineCreationCacheControlFeatures( *reinterpret_cast<PhysicalDevicePipelineCreationCacheControlFeatures const *>( &rhs ) )
    {
    }

    PhysicalDevicePipelineCreationCacheControlFeatures &
      operator=( PhysicalDevicePipelineCreationCacheControlFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDevicePipelineCreationCacheControlFeatures & operator=( VkPhysicalDevicePipelineCreationCacheControlFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineCreationCacheControlFeatures const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineCreationCacheControlFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineCreationCacheControlFeatures &
      setPipelineCreationCacheControl( VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl_ ) VULKAN_HPP_NOEXCEPT
    {
      pipelineCreationCacheControl = pipelineCreationCacheControl_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkPhysicalDevicePipelineCreationCacheControlFeatures const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDevicePipelineCreationCacheControlFeatures *>( this );
    }

    operator VkPhysicalDevicePipelineCreationCacheControlFeatures &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDevicePipelineCreationCacheControlFeatures *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#  if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#  else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#  endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, pipelineCreationCacheControl );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDevicePipelineCreationCacheControlFeatures const & ) const = default;
#else
    bool operator==( PhysicalDevicePipelineCreationCacheControlFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineCreationCacheControl == rhs.pipelineCreationCacheControl );
#  endif
    }

    bool operator!=( PhysicalDevicePipelineCreationCacheControlFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePipelineCreationCacheControlFeatures;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl = {};
  };

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDevicePipelineCreationCacheControlFeatures>
  {
    using Type = PhysicalDevicePipelineCreationCacheControlFeatures;
  };

  using PhysicalDevicePipelineCreationCacheControlFeaturesEXT = PhysicalDevicePipelineCreationCacheControlFeatures;
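
  // Illustrative sketch (added commentary, not part of the generated registry output): enabling
  // pipeline creation cache control at device creation by chaining the feature struct into
  // vk::DeviceCreateInfo. The queue-create infos and the vk::PhysicalDevice `physicalDevice`
  // are assumed to be set up elsewhere.
  //
  //   vk::PhysicalDevicePipelineCreationCacheControlFeatures cacheControlFeatures( VK_TRUE );
  //   vk::DeviceCreateInfo deviceCreateInfo( {}, queueCreateInfos );
  //   deviceCreateInfo.setPNext( &cacheControlFeatures );
  //   vk::Device device = physicalDevice.createDevice( deviceCreateInfo );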
struct PhysicalDevicePointClippingProperties
|
|
{
|
|
using NativeType = VkPhysicalDevicePointClippingProperties;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePointClippingProperties;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDevicePointClippingProperties(
|
|
VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior_ = VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes,
|
|
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, pointClippingBehavior( pointClippingBehavior_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDevicePointClippingProperties( PhysicalDevicePointClippingProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDevicePointClippingProperties( VkPhysicalDevicePointClippingProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDevicePointClippingProperties( *reinterpret_cast<PhysicalDevicePointClippingProperties const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
PhysicalDevicePointClippingProperties & operator=( PhysicalDevicePointClippingProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDevicePointClippingProperties & operator=( VkPhysicalDevicePointClippingProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePointClippingProperties const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
operator VkPhysicalDevicePointClippingProperties const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDevicePointClippingProperties *>( this );
|
|
}
|
|
|
|
operator VkPhysicalDevicePointClippingProperties &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDevicePointClippingProperties *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::PointClippingBehavior const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, pointClippingBehavior );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( PhysicalDevicePointClippingProperties const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDevicePointClippingProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pointClippingBehavior == rhs.pointClippingBehavior );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDevicePointClippingProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePointClippingProperties;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior = VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes;
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDevicePointClippingProperties>
|
|
{
|
|
using Type = PhysicalDevicePointClippingProperties;
|
|
};
|
|
|
|
using PhysicalDevicePointClippingPropertiesKHR = PhysicalDevicePointClippingProperties;
|
|
|
|
struct PhysicalDevicePrivateDataFeatures
|
|
{
|
|
using NativeType = VkPhysicalDevicePrivateDataFeatures;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePrivateDataFeatures;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDevicePrivateDataFeatures( VULKAN_HPP_NAMESPACE::Bool32 privateData_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, privateData( privateData_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDevicePrivateDataFeatures( PhysicalDevicePrivateDataFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDevicePrivateDataFeatures( VkPhysicalDevicePrivateDataFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDevicePrivateDataFeatures( *reinterpret_cast<PhysicalDevicePrivateDataFeatures const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
PhysicalDevicePrivateDataFeatures & operator=( PhysicalDevicePrivateDataFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDevicePrivateDataFeatures & operator=( VkPhysicalDevicePrivateDataFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePrivateDataFeatures const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrivateDataFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrivateDataFeatures & setPrivateData( VULKAN_HPP_NAMESPACE::Bool32 privateData_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
privateData = privateData_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkPhysicalDevicePrivateDataFeatures const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDevicePrivateDataFeatures *>( this );
|
|
}
|
|
|
|
operator VkPhysicalDevicePrivateDataFeatures &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDevicePrivateDataFeatures *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, privateData );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( PhysicalDevicePrivateDataFeatures const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDevicePrivateDataFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( privateData == rhs.privateData );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDevicePrivateDataFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePrivateDataFeatures;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 privateData = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDevicePrivateDataFeatures>
|
|
{
|
|
using Type = PhysicalDevicePrivateDataFeatures;
|
|
};
|
|
|
|
using PhysicalDevicePrivateDataFeaturesEXT = PhysicalDevicePrivateDataFeatures;
|
|
|
|
struct PhysicalDeviceSparseProperties
|
|
{
|
|
using NativeType = VkPhysicalDeviceSparseProperties;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceSparseProperties( VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DBlockShape_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DMultisampleBlockShape_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 residencyStandard3DBlockShape_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 residencyAlignedMipSize_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 residencyNonResidentStrict_ = {} ) VULKAN_HPP_NOEXCEPT
|
|
: residencyStandard2DBlockShape( residencyStandard2DBlockShape_ )
|
|
, residencyStandard2DMultisampleBlockShape( residencyStandard2DMultisampleBlockShape_ )
|
|
, residencyStandard3DBlockShape( residencyStandard3DBlockShape_ )
|
|
, residencyAlignedMipSize( residencyAlignedMipSize_ )
|
|
, residencyNonResidentStrict( residencyNonResidentStrict_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceSparseProperties( PhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceSparseProperties( VkPhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceSparseProperties( *reinterpret_cast<PhysicalDeviceSparseProperties const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
PhysicalDeviceSparseProperties & operator=( PhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceSparseProperties & operator=( VkPhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
operator VkPhysicalDeviceSparseProperties const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceSparseProperties *>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceSparseProperties &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceSparseProperties *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( residencyStandard2DBlockShape,
|
|
residencyStandard2DMultisampleBlockShape,
|
|
residencyStandard3DBlockShape,
|
|
residencyAlignedMipSize,
|
|
residencyNonResidentStrict );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( PhysicalDeviceSparseProperties const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceSparseProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( residencyStandard2DBlockShape == rhs.residencyStandard2DBlockShape ) &&
|
|
( residencyStandard2DMultisampleBlockShape == rhs.residencyStandard2DMultisampleBlockShape ) &&
|
|
( residencyStandard3DBlockShape == rhs.residencyStandard3DBlockShape ) && ( residencyAlignedMipSize == rhs.residencyAlignedMipSize ) &&
|
|
( residencyNonResidentStrict == rhs.residencyNonResidentStrict );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceSparseProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DBlockShape = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DMultisampleBlockShape = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 residencyStandard3DBlockShape = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 residencyAlignedMipSize = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 residencyNonResidentStrict = {};
|
|
};
|
|
|
|
  struct PhysicalDeviceProperties
  {
    using NativeType = VkPhysicalDeviceProperties;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties( uint32_t apiVersion_ = {},
                                                      uint32_t driverVersion_ = {},
                                                      uint32_t vendorID_ = {},
                                                      uint32_t deviceID_ = {},
                                                      VULKAN_HPP_NAMESPACE::PhysicalDeviceType deviceType_ = VULKAN_HPP_NAMESPACE::PhysicalDeviceType::eOther,
                                                      std::array<char, VK_MAX_PHYSICAL_DEVICE_NAME_SIZE> const & deviceName_ = {},
                                                      std::array<uint8_t, VK_UUID_SIZE> const & pipelineCacheUUID_ = {},
                                                      VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits limits_ = {},
                                                      VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties sparseProperties_ = {} ) VULKAN_HPP_NOEXCEPT
      : apiVersion( apiVersion_ )
      , driverVersion( driverVersion_ )
      , vendorID( vendorID_ )
      , deviceID( deviceID_ )
      , deviceType( deviceType_ )
      , deviceName( deviceName_ )
      , pipelineCacheUUID( pipelineCacheUUID_ )
      , limits( limits_ )
      , sparseProperties( sparseProperties_ )
    {
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties( PhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceProperties( VkPhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceProperties( *reinterpret_cast<PhysicalDeviceProperties const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    PhysicalDeviceProperties( uint32_t apiVersion_,
                              uint32_t driverVersion_,
                              uint32_t vendorID_,
                              uint32_t deviceID_,
                              VULKAN_HPP_NAMESPACE::PhysicalDeviceType deviceType_,
                              std::string const & deviceName_,
                              std::array<uint8_t, VK_UUID_SIZE> const & pipelineCacheUUID_ = {},
                              VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits limits_ = {},
                              VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties sparseProperties_ = {} )
      : apiVersion( apiVersion_ )
      , driverVersion( driverVersion_ )
      , vendorID( vendorID_ )
      , deviceID( deviceID_ )
      , deviceType( deviceType_ )
      , pipelineCacheUUID( pipelineCacheUUID_ )
      , limits( limits_ )
      , sparseProperties( sparseProperties_ )
    {
      VULKAN_HPP_ASSERT( deviceName_.size() < VK_MAX_PHYSICAL_DEVICE_NAME_SIZE );
#    if defined( WIN32 )
      strncpy_s( deviceName, VK_MAX_PHYSICAL_DEVICE_NAME_SIZE, deviceName_.data(), deviceName_.size() );
#    else
      strncpy( deviceName, deviceName_.data(), std::min<size_t>( VK_MAX_PHYSICAL_DEVICE_NAME_SIZE, deviceName_.size() ) );
#    endif
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    PhysicalDeviceProperties & operator=( PhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceProperties & operator=( VkPhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties const *>( &rhs );
      return *this;
    }

    operator VkPhysicalDeviceProperties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceProperties *>( this );
    }

    operator VkPhysicalDeviceProperties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceProperties *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#  if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#  else
    std::tuple<uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               VULKAN_HPP_NAMESPACE::PhysicalDeviceType const &,
               VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_PHYSICAL_DEVICE_NAME_SIZE> const &,
               VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> const &,
               VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits const &,
               VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties const &>
#  endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( apiVersion, driverVersion, vendorID, deviceID, deviceType, deviceName, pipelineCacheUUID, limits, sparseProperties );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    std::partial_ordering operator<=>( PhysicalDeviceProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      if ( auto cmp = apiVersion <=> rhs.apiVersion; cmp != 0 )
        return cmp;
      if ( auto cmp = driverVersion <=> rhs.driverVersion; cmp != 0 )
        return cmp;
      if ( auto cmp = vendorID <=> rhs.vendorID; cmp != 0 )
        return cmp;
      if ( auto cmp = deviceID <=> rhs.deviceID; cmp != 0 )
        return cmp;
      if ( auto cmp = deviceType <=> rhs.deviceType; cmp != 0 )
        return cmp;
      if ( auto cmp = strcmp( deviceName, rhs.deviceName ); cmp != 0 )
        return ( cmp < 0 ) ? std::partial_ordering::less : std::partial_ordering::greater;
      if ( auto cmp = pipelineCacheUUID <=> rhs.pipelineCacheUUID; cmp != 0 )
        return cmp;
      if ( auto cmp = limits <=> rhs.limits; cmp != 0 )
        return cmp;
      if ( auto cmp = sparseProperties <=> rhs.sparseProperties; cmp != 0 )
        return cmp;

      return std::partial_ordering::equivalent;
    }
#endif

    bool operator==( PhysicalDeviceProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( apiVersion == rhs.apiVersion ) && ( driverVersion == rhs.driverVersion ) && ( vendorID == rhs.vendorID ) && ( deviceID == rhs.deviceID ) &&
             ( deviceType == rhs.deviceType ) && ( strcmp( deviceName, rhs.deviceName ) == 0 ) && ( pipelineCacheUUID == rhs.pipelineCacheUUID ) &&
             ( limits == rhs.limits ) && ( sparseProperties == rhs.sparseProperties );
    }

    bool operator!=( PhysicalDeviceProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }

  public:
    uint32_t apiVersion = {};
    uint32_t driverVersion = {};
    uint32_t vendorID = {};
    uint32_t deviceID = {};
    VULKAN_HPP_NAMESPACE::PhysicalDeviceType deviceType = VULKAN_HPP_NAMESPACE::PhysicalDeviceType::eOther;
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_PHYSICAL_DEVICE_NAME_SIZE> deviceName = {};
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> pipelineCacheUUID = {};
    VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits limits = {};
    VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties sparseProperties = {};
  };
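
  // Illustrative usage sketch (added commentary, not part of the generated registry output):
  // reading the core properties of a device, assuming a valid vk::PhysicalDevice named
  // `physicalDevice`. ArrayWrapper1D<char, N> converts implicitly to std::string, so the
  // device name can be copied out directly.
  //
  //   vk::PhysicalDeviceProperties props = physicalDevice.getProperties();
  //   std::string deviceName = props.deviceName;
  //   uint32_t    apiMajor   = VK_API_VERSION_MAJOR( props.apiVersion );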

  struct PhysicalDeviceProperties2
  {
    using NativeType = VkPhysicalDeviceProperties2;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceProperties2;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties_ = {},
                                                       void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , properties( properties_ )
    {
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties2( PhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceProperties2( VkPhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceProperties2( *reinterpret_cast<PhysicalDeviceProperties2 const *>( &rhs ) )
    {
    }

    PhysicalDeviceProperties2 & operator=( PhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceProperties2 & operator=( VkPhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 const *>( &rhs );
      return *this;
    }

    operator VkPhysicalDeviceProperties2 const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceProperties2 *>( this );
    }

    operator VkPhysicalDeviceProperties2 &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceProperties2 *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#  if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#  else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties const &>
#  endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, properties );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceProperties2 const & ) const = default;
#else
    bool operator==( PhysicalDeviceProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( properties == rhs.properties );
#  endif
    }

    bool operator!=( PhysicalDeviceProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceProperties2;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties = {};
  };

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceProperties2>
  {
    using Type = PhysicalDeviceProperties2;
  };

  using PhysicalDeviceProperties2KHR = PhysicalDeviceProperties2;
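
  // Illustrative usage sketch (added commentary, not part of the generated registry output):
  // querying extended properties through a structure chain, assuming a valid vk::PhysicalDevice
  // `physicalDevice`, a Vulkan 1.1+ instance, and support for VK_EXT_pci_bus_info.
  //
  //   auto chain = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2,
  //                                              vk::PhysicalDevicePCIBusInfoPropertiesEXT>();
  //   uint32_t apiVersion = chain.get<vk::PhysicalDeviceProperties2>().properties.apiVersion;
  //   uint32_t pciBus     = chain.get<vk::PhysicalDevicePCIBusInfoPropertiesEXT>().pciBus;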
struct PhysicalDeviceProtectedMemoryFeatures
|
|
{
|
|
using NativeType = VkPhysicalDeviceProtectedMemoryFeatures;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceProtectedMemoryFeatures;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceProtectedMemoryFeatures( VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_ = {},
|
|
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, protectedMemory( protectedMemory_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceProtectedMemoryFeatures( PhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceProtectedMemoryFeatures( VkPhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceProtectedMemoryFeatures( *reinterpret_cast<PhysicalDeviceProtectedMemoryFeatures const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
PhysicalDeviceProtectedMemoryFeatures & operator=( PhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceProtectedMemoryFeatures & operator=( VkPhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryFeatures const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProtectedMemoryFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProtectedMemoryFeatures & setProtectedMemory( VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
protectedMemory = protectedMemory_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkPhysicalDeviceProtectedMemoryFeatures const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceProtectedMemoryFeatures *>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceProtectedMemoryFeatures &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceProtectedMemoryFeatures *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, protectedMemory );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( PhysicalDeviceProtectedMemoryFeatures const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceProtectedMemoryFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( protectedMemory == rhs.protectedMemory );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceProtectedMemoryFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceProtectedMemoryFeatures;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 protectedMemory = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceProtectedMemoryFeatures>
|
|
{
|
|
using Type = PhysicalDeviceProtectedMemoryFeatures;
|
|
};
|
|
|
|
struct PhysicalDeviceProtectedMemoryProperties
|
|
{
|
|
using NativeType = VkPhysicalDeviceProtectedMemoryProperties;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceProtectedMemoryProperties;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceProtectedMemoryProperties( VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault_ = {},
|
|
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, protectedNoFault( protectedNoFault_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceProtectedMemoryProperties( PhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceProtectedMemoryProperties( VkPhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceProtectedMemoryProperties( *reinterpret_cast<PhysicalDeviceProtectedMemoryProperties const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
PhysicalDeviceProtectedMemoryProperties & operator=( PhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceProtectedMemoryProperties & operator=( VkPhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryProperties const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
operator VkPhysicalDeviceProtectedMemoryProperties const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceProtectedMemoryProperties *>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceProtectedMemoryProperties &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceProtectedMemoryProperties *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, protectedNoFault );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( PhysicalDeviceProtectedMemoryProperties const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceProtectedMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( protectedNoFault == rhs.protectedNoFault );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceProtectedMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceProtectedMemoryProperties;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceProtectedMemoryProperties>
|
|
{
|
|
using Type = PhysicalDeviceProtectedMemoryProperties;
|
|
};
|
|
|
|
struct PhysicalDeviceRobustness2FeaturesEXT
|
|
{
|
|
using NativeType = VkPhysicalDeviceRobustness2FeaturesEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRobustness2FeaturesEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceRobustness2FeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess2_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess2_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 nullDescriptor_ = {},
|
|
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, robustBufferAccess2( robustBufferAccess2_ )
|
|
, robustImageAccess2( robustImageAccess2_ )
|
|
, nullDescriptor( nullDescriptor_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceRobustness2FeaturesEXT( PhysicalDeviceRobustness2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceRobustness2FeaturesEXT( VkPhysicalDeviceRobustness2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceRobustness2FeaturesEXT( *reinterpret_cast<PhysicalDeviceRobustness2FeaturesEXT const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
PhysicalDeviceRobustness2FeaturesEXT & operator=( PhysicalDeviceRobustness2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceRobustness2FeaturesEXT & operator=( VkPhysicalDeviceRobustness2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRobustness2FeaturesEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRobustness2FeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRobustness2FeaturesEXT &
|
|
setRobustBufferAccess2( VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess2_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
robustBufferAccess2 = robustBufferAccess2_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRobustness2FeaturesEXT & setRobustImageAccess2( VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess2_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
robustImageAccess2 = robustImageAccess2_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRobustness2FeaturesEXT & setNullDescriptor( VULKAN_HPP_NAMESPACE::Bool32 nullDescriptor_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
nullDescriptor = nullDescriptor_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkPhysicalDeviceRobustness2FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceRobustness2FeaturesEXT *>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceRobustness2FeaturesEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceRobustness2FeaturesEXT *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
void * const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, robustBufferAccess2, robustImageAccess2, nullDescriptor );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( PhysicalDeviceRobustness2FeaturesEXT const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceRobustness2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( robustBufferAccess2 == rhs.robustBufferAccess2 ) &&
|
|
( robustImageAccess2 == rhs.robustImageAccess2 ) && ( nullDescriptor == rhs.nullDescriptor );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceRobustness2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRobustness2FeaturesEXT;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess2 = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess2 = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 nullDescriptor = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceRobustness2FeaturesEXT>
|
|
{
|
|
using Type = PhysicalDeviceRobustness2FeaturesEXT;
|
|
};
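
  // Illustrative sketch (added commentary, not part of the generated registry output): requesting
  // null-descriptor support from VK_EXT_robustness2 at device creation. `deviceCreateInfo` and
  // `physicalDevice` are assumed to be set up elsewhere, with the extension enabled.
  //
  //   vk::PhysicalDeviceRobustness2FeaturesEXT robustness2Features{};
  //   robustness2Features.setNullDescriptor( VK_TRUE );
  //   deviceCreateInfo.setPNext( &robustness2Features );
  //   vk::Device device = physicalDevice.createDevice( deviceCreateInfo );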
|
|
|
|
struct PhysicalDeviceRobustness2PropertiesEXT
|
|
{
|
|
using NativeType = VkPhysicalDeviceRobustness2PropertiesEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRobustness2PropertiesEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceRobustness2PropertiesEXT( VULKAN_HPP_NAMESPACE::DeviceSize robustStorageBufferAccessSizeAlignment_ = {},
|
|
VULKAN_HPP_NAMESPACE::DeviceSize robustUniformBufferAccessSizeAlignment_ = {},
|
|
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, robustStorageBufferAccessSizeAlignment( robustStorageBufferAccessSizeAlignment_ )
|
|
, robustUniformBufferAccessSizeAlignment( robustUniformBufferAccessSizeAlignment_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceRobustness2PropertiesEXT( PhysicalDeviceRobustness2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceRobustness2PropertiesEXT( VkPhysicalDeviceRobustness2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceRobustness2PropertiesEXT( *reinterpret_cast<PhysicalDeviceRobustness2PropertiesEXT const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
PhysicalDeviceRobustness2PropertiesEXT & operator=( PhysicalDeviceRobustness2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceRobustness2PropertiesEXT & operator=( VkPhysicalDeviceRobustness2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRobustness2PropertiesEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
operator VkPhysicalDeviceRobustness2PropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceRobustness2PropertiesEXT *>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceRobustness2PropertiesEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceRobustness2PropertiesEXT *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, robustStorageBufferAccessSizeAlignment, robustUniformBufferAccessSizeAlignment );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( PhysicalDeviceRobustness2PropertiesEXT const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceRobustness2PropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( robustStorageBufferAccessSizeAlignment == rhs.robustStorageBufferAccessSizeAlignment ) &&
|
|
( robustUniformBufferAccessSizeAlignment == rhs.robustUniformBufferAccessSizeAlignment );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceRobustness2PropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRobustness2PropertiesEXT;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize robustStorageBufferAccessSizeAlignment = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize robustUniformBufferAccessSizeAlignment = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceRobustness2PropertiesEXT>
|
|
{
|
|
using Type = PhysicalDeviceRobustness2PropertiesEXT;
|
|
};
|
|
|
|
struct PhysicalDeviceSampleLocationsPropertiesEXT
|
|
{
|
|
using NativeType = VkPhysicalDeviceSampleLocationsPropertiesEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSampleLocationsPropertiesEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSampleLocationsPropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlags sampleLocationSampleCounts_ = {},
|
|
VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize_ = {},
|
|
std::array<float, 2> const & sampleLocationCoordinateRange_ = {},
|
|
uint32_t sampleLocationSubPixelBits_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 variableSampleLocations_ = {},
|
|
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, sampleLocationSampleCounts( sampleLocationSampleCounts_ )
|
|
, maxSampleLocationGridSize( maxSampleLocationGridSize_ )
|
|
, sampleLocationCoordinateRange( sampleLocationCoordinateRange_ )
|
|
, sampleLocationSubPixelBits( sampleLocationSubPixelBits_ )
|
|
, variableSampleLocations( variableSampleLocations_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSampleLocationsPropertiesEXT( PhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceSampleLocationsPropertiesEXT( VkPhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceSampleLocationsPropertiesEXT( *reinterpret_cast<PhysicalDeviceSampleLocationsPropertiesEXT const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
PhysicalDeviceSampleLocationsPropertiesEXT & operator=( PhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceSampleLocationsPropertiesEXT & operator=( VkPhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSampleLocationsPropertiesEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
operator VkPhysicalDeviceSampleLocationsPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceSampleLocationsPropertiesEXT *>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceSampleLocationsPropertiesEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceSampleLocationsPropertiesEXT *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
void * const &,
|
|
VULKAN_HPP_NAMESPACE::SampleCountFlags const &,
|
|
VULKAN_HPP_NAMESPACE::Extent2D const &,
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 2> const &,
|
|
uint32_t const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType,
|
|
pNext,
|
|
sampleLocationSampleCounts,
|
|
maxSampleLocationGridSize,
|
|
sampleLocationCoordinateRange,
|
|
sampleLocationSubPixelBits,
|
|
variableSampleLocations );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( PhysicalDeviceSampleLocationsPropertiesEXT const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( sampleLocationSampleCounts == rhs.sampleLocationSampleCounts ) &&
|
|
( maxSampleLocationGridSize == rhs.maxSampleLocationGridSize ) && ( sampleLocationCoordinateRange == rhs.sampleLocationCoordinateRange ) &&
|
|
( sampleLocationSubPixelBits == rhs.sampleLocationSubPixelBits ) && ( variableSampleLocations == rhs.variableSampleLocations );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSampleLocationsPropertiesEXT;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::SampleCountFlags sampleLocationSampleCounts = {};
|
|
VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize = {};
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 2> sampleLocationCoordinateRange = {};
|
|
uint32_t sampleLocationSubPixelBits = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 variableSampleLocations = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceSampleLocationsPropertiesEXT>
|
|
{
|
|
using Type = PhysicalDeviceSampleLocationsPropertiesEXT;
|
|
};
|
|
|
|
struct PhysicalDeviceSamplerFilterMinmaxProperties
|
|
{
|
|
using NativeType = VkPhysicalDeviceSamplerFilterMinmaxProperties;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSamplerFilterMinmaxProperties;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerFilterMinmaxProperties( VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping_ = {},
|
|
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, filterMinmaxSingleComponentFormats( filterMinmaxSingleComponentFormats_ )
|
|
, filterMinmaxImageComponentMapping( filterMinmaxImageComponentMapping_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerFilterMinmaxProperties( PhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceSamplerFilterMinmaxProperties( VkPhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceSamplerFilterMinmaxProperties( *reinterpret_cast<PhysicalDeviceSamplerFilterMinmaxProperties const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
PhysicalDeviceSamplerFilterMinmaxProperties & operator=( PhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceSamplerFilterMinmaxProperties & operator=( VkPhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerFilterMinmaxProperties const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
operator VkPhysicalDeviceSamplerFilterMinmaxProperties const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceSamplerFilterMinmaxProperties *>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceSamplerFilterMinmaxProperties &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceSamplerFilterMinmaxProperties *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, filterMinmaxSingleComponentFormats, filterMinmaxImageComponentMapping );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( PhysicalDeviceSamplerFilterMinmaxProperties const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( filterMinmaxSingleComponentFormats == rhs.filterMinmaxSingleComponentFormats ) &&
|
|
( filterMinmaxImageComponentMapping == rhs.filterMinmaxImageComponentMapping );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSamplerFilterMinmaxProperties;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceSamplerFilterMinmaxProperties>
|
|
{
|
|
using Type = PhysicalDeviceSamplerFilterMinmaxProperties;
|
|
};
|
|
|
|
using PhysicalDeviceSamplerFilterMinmaxPropertiesEXT = PhysicalDeviceSamplerFilterMinmaxProperties;
|
|
|
|
  struct PhysicalDeviceSamplerYcbcrConversionFeatures
  {
    using NativeType = VkPhysicalDeviceSamplerYcbcrConversionFeatures;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSamplerYcbcrConversionFeatures;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerYcbcrConversionFeatures( VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_ = {},
                                                                       void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , samplerYcbcrConversion( samplerYcbcrConversion_ )
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerYcbcrConversionFeatures( PhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceSamplerYcbcrConversionFeatures( VkPhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceSamplerYcbcrConversionFeatures( *reinterpret_cast<PhysicalDeviceSamplerYcbcrConversionFeatures const *>( &rhs ) )
    {
    }

    PhysicalDeviceSamplerYcbcrConversionFeatures & operator=( PhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceSamplerYcbcrConversionFeatures & operator=( VkPhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerYcbcrConversionFeatures const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSamplerYcbcrConversionFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSamplerYcbcrConversionFeatures &
      setSamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_ ) VULKAN_HPP_NOEXCEPT
    {
      samplerYcbcrConversion = samplerYcbcrConversion_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkPhysicalDeviceSamplerYcbcrConversionFeatures const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceSamplerYcbcrConversionFeatures *>( this );
    }

    operator VkPhysicalDeviceSamplerYcbcrConversionFeatures &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceSamplerYcbcrConversionFeatures *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, samplerYcbcrConversion );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceSamplerYcbcrConversionFeatures const & ) const = default;
#else
    bool operator==( PhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
# if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
# else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( samplerYcbcrConversion == rhs.samplerYcbcrConversion );
# endif
    }

    bool operator!=( PhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSamplerYcbcrConversionFeatures;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion = {};
  };

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceSamplerYcbcrConversionFeatures>
  {
    using Type = PhysicalDeviceSamplerYcbcrConversionFeatures;
  };

  using PhysicalDeviceSamplerYcbcrConversionFeaturesKHR = PhysicalDeviceSamplerYcbcrConversionFeatures;

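  // Illustrative note (not part of the generated registry output): feature structs such as
  // PhysicalDeviceSamplerYcbcrConversionFeatures are typically queried by chaining them behind
  // PhysicalDeviceFeatures2 and can then be passed unchanged into DeviceCreateInfo::pNext to enable
  // the feature. A minimal sketch, assuming `physicalDevice` is a valid vk::PhysicalDevice and the
  // default `vk` namespace is used:
  //
  //   auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
  //                                            vk::PhysicalDeviceSamplerYcbcrConversionFeatures>();
  //   bool hasYcbcrConversion =
  //     chain.get<vk::PhysicalDeviceSamplerYcbcrConversionFeatures>().samplerYcbcrConversion == VK_TRUE;
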
  struct PhysicalDeviceScalarBlockLayoutFeatures
  {
    using NativeType = VkPhysicalDeviceScalarBlockLayoutFeatures;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceScalarBlockLayoutFeatures;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceScalarBlockLayoutFeatures( VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout_ = {},
                                                                  void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , scalarBlockLayout( scalarBlockLayout_ )
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceScalarBlockLayoutFeatures( PhysicalDeviceScalarBlockLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceScalarBlockLayoutFeatures( VkPhysicalDeviceScalarBlockLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceScalarBlockLayoutFeatures( *reinterpret_cast<PhysicalDeviceScalarBlockLayoutFeatures const *>( &rhs ) )
    {
    }

    PhysicalDeviceScalarBlockLayoutFeatures & operator=( PhysicalDeviceScalarBlockLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceScalarBlockLayoutFeatures & operator=( VkPhysicalDeviceScalarBlockLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceScalarBlockLayoutFeatures const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceScalarBlockLayoutFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceScalarBlockLayoutFeatures &
      setScalarBlockLayout( VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout_ ) VULKAN_HPP_NOEXCEPT
    {
      scalarBlockLayout = scalarBlockLayout_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkPhysicalDeviceScalarBlockLayoutFeatures const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceScalarBlockLayoutFeatures *>( this );
    }

    operator VkPhysicalDeviceScalarBlockLayoutFeatures &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceScalarBlockLayoutFeatures *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, scalarBlockLayout );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceScalarBlockLayoutFeatures const & ) const = default;
#else
    bool operator==( PhysicalDeviceScalarBlockLayoutFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
# if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
# else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( scalarBlockLayout == rhs.scalarBlockLayout );
# endif
    }

    bool operator!=( PhysicalDeviceScalarBlockLayoutFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceScalarBlockLayoutFeatures;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout = {};
  };

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceScalarBlockLayoutFeatures>
  {
    using Type = PhysicalDeviceScalarBlockLayoutFeatures;
  };

  using PhysicalDeviceScalarBlockLayoutFeaturesEXT = PhysicalDeviceScalarBlockLayoutFeatures;

  struct PhysicalDeviceSeparateDepthStencilLayoutsFeatures
  {
    using NativeType = VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSeparateDepthStencilLayoutsFeatures;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceSeparateDepthStencilLayoutsFeatures( VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts_ = {},
                                                                            void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , separateDepthStencilLayouts( separateDepthStencilLayouts_ )
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceSeparateDepthStencilLayoutsFeatures( PhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceSeparateDepthStencilLayoutsFeatures( VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceSeparateDepthStencilLayoutsFeatures( *reinterpret_cast<PhysicalDeviceSeparateDepthStencilLayoutsFeatures const *>( &rhs ) )
    {
    }

    PhysicalDeviceSeparateDepthStencilLayoutsFeatures &
      operator=( PhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceSeparateDepthStencilLayoutsFeatures & operator=( VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSeparateDepthStencilLayoutsFeatures const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSeparateDepthStencilLayoutsFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSeparateDepthStencilLayoutsFeatures &
      setSeparateDepthStencilLayouts( VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts_ ) VULKAN_HPP_NOEXCEPT
    {
      separateDepthStencilLayouts = separateDepthStencilLayouts_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures *>( this );
    }

    operator VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, separateDepthStencilLayouts );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceSeparateDepthStencilLayoutsFeatures const & ) const = default;
#else
    bool operator==( PhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
# if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
# else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( separateDepthStencilLayouts == rhs.separateDepthStencilLayouts );
# endif
    }

    bool operator!=( PhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSeparateDepthStencilLayoutsFeatures;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts = {};
  };

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceSeparateDepthStencilLayoutsFeatures>
  {
    using Type = PhysicalDeviceSeparateDepthStencilLayoutsFeatures;
  };

  using PhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR = PhysicalDeviceSeparateDepthStencilLayoutsFeatures;

  struct PhysicalDeviceShaderAtomicFloatFeaturesEXT
  {
    using NativeType = VkPhysicalDeviceShaderAtomicFloatFeaturesEXT;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderAtomicFloatFeaturesEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicFloatFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32Atomics_ = {},
                                                                     VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32AtomicAdd_ = {},
                                                                     VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64Atomics_ = {},
                                                                     VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64AtomicAdd_ = {},
                                                                     VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32Atomics_ = {},
                                                                     VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32AtomicAdd_ = {},
                                                                     VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64Atomics_ = {},
                                                                     VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64AtomicAdd_ = {},
                                                                     VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32Atomics_ = {},
                                                                     VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32AtomicAdd_ = {},
                                                                     VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32Atomics_ = {},
                                                                     VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32AtomicAdd_ = {},
                                                                     void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , shaderBufferFloat32Atomics( shaderBufferFloat32Atomics_ )
      , shaderBufferFloat32AtomicAdd( shaderBufferFloat32AtomicAdd_ )
      , shaderBufferFloat64Atomics( shaderBufferFloat64Atomics_ )
      , shaderBufferFloat64AtomicAdd( shaderBufferFloat64AtomicAdd_ )
      , shaderSharedFloat32Atomics( shaderSharedFloat32Atomics_ )
      , shaderSharedFloat32AtomicAdd( shaderSharedFloat32AtomicAdd_ )
      , shaderSharedFloat64Atomics( shaderSharedFloat64Atomics_ )
      , shaderSharedFloat64AtomicAdd( shaderSharedFloat64AtomicAdd_ )
      , shaderImageFloat32Atomics( shaderImageFloat32Atomics_ )
      , shaderImageFloat32AtomicAdd( shaderImageFloat32AtomicAdd_ )
      , sparseImageFloat32Atomics( sparseImageFloat32Atomics_ )
      , sparseImageFloat32AtomicAdd( sparseImageFloat32AtomicAdd_ )
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicFloatFeaturesEXT( PhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceShaderAtomicFloatFeaturesEXT( VkPhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceShaderAtomicFloatFeaturesEXT( *reinterpret_cast<PhysicalDeviceShaderAtomicFloatFeaturesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceShaderAtomicFloatFeaturesEXT & operator=( PhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceShaderAtomicFloatFeaturesEXT & operator=( VkPhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloatFeaturesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT &
      setShaderBufferFloat32Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32Atomics_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderBufferFloat32Atomics = shaderBufferFloat32Atomics_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT &
      setShaderBufferFloat32AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32AtomicAdd_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderBufferFloat32AtomicAdd = shaderBufferFloat32AtomicAdd_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT &
      setShaderBufferFloat64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64Atomics_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderBufferFloat64Atomics = shaderBufferFloat64Atomics_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT &
      setShaderBufferFloat64AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64AtomicAdd_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderBufferFloat64AtomicAdd = shaderBufferFloat64AtomicAdd_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT &
      setShaderSharedFloat32Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32Atomics_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderSharedFloat32Atomics = shaderSharedFloat32Atomics_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT &
      setShaderSharedFloat32AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32AtomicAdd_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderSharedFloat32AtomicAdd = shaderSharedFloat32AtomicAdd_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT &
      setShaderSharedFloat64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64Atomics_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderSharedFloat64Atomics = shaderSharedFloat64Atomics_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT &
      setShaderSharedFloat64AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64AtomicAdd_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderSharedFloat64AtomicAdd = shaderSharedFloat64AtomicAdd_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT &
      setShaderImageFloat32Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32Atomics_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderImageFloat32Atomics = shaderImageFloat32Atomics_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT &
      setShaderImageFloat32AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32AtomicAdd_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderImageFloat32AtomicAdd = shaderImageFloat32AtomicAdd_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT &
      setSparseImageFloat32Atomics( VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32Atomics_ ) VULKAN_HPP_NOEXCEPT
    {
      sparseImageFloat32Atomics = sparseImageFloat32Atomics_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT &
      setSparseImageFloat32AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32AtomicAdd_ ) VULKAN_HPP_NOEXCEPT
    {
      sparseImageFloat32AtomicAdd = sparseImageFloat32AtomicAdd_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkPhysicalDeviceShaderAtomicFloatFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShaderAtomicFloatFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceShaderAtomicFloatFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShaderAtomicFloatFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
               void * const &,
               VULKAN_HPP_NAMESPACE::Bool32 const &,
               VULKAN_HPP_NAMESPACE::Bool32 const &,
               VULKAN_HPP_NAMESPACE::Bool32 const &,
               VULKAN_HPP_NAMESPACE::Bool32 const &,
               VULKAN_HPP_NAMESPACE::Bool32 const &,
               VULKAN_HPP_NAMESPACE::Bool32 const &,
               VULKAN_HPP_NAMESPACE::Bool32 const &,
               VULKAN_HPP_NAMESPACE::Bool32 const &,
               VULKAN_HPP_NAMESPACE::Bool32 const &,
               VULKAN_HPP_NAMESPACE::Bool32 const &,
               VULKAN_HPP_NAMESPACE::Bool32 const &,
               VULKAN_HPP_NAMESPACE::Bool32 const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       shaderBufferFloat32Atomics,
                       shaderBufferFloat32AtomicAdd,
                       shaderBufferFloat64Atomics,
                       shaderBufferFloat64AtomicAdd,
                       shaderSharedFloat32Atomics,
                       shaderSharedFloat32AtomicAdd,
                       shaderSharedFloat64Atomics,
                       shaderSharedFloat64AtomicAdd,
                       shaderImageFloat32Atomics,
                       shaderImageFloat32AtomicAdd,
                       sparseImageFloat32Atomics,
                       sparseImageFloat32AtomicAdd );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceShaderAtomicFloatFeaturesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
# if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
# else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderBufferFloat32Atomics == rhs.shaderBufferFloat32Atomics ) &&
             ( shaderBufferFloat32AtomicAdd == rhs.shaderBufferFloat32AtomicAdd ) && ( shaderBufferFloat64Atomics == rhs.shaderBufferFloat64Atomics ) &&
             ( shaderBufferFloat64AtomicAdd == rhs.shaderBufferFloat64AtomicAdd ) && ( shaderSharedFloat32Atomics == rhs.shaderSharedFloat32Atomics ) &&
             ( shaderSharedFloat32AtomicAdd == rhs.shaderSharedFloat32AtomicAdd ) && ( shaderSharedFloat64Atomics == rhs.shaderSharedFloat64Atomics ) &&
             ( shaderSharedFloat64AtomicAdd == rhs.shaderSharedFloat64AtomicAdd ) && ( shaderImageFloat32Atomics == rhs.shaderImageFloat32Atomics ) &&
             ( shaderImageFloat32AtomicAdd == rhs.shaderImageFloat32AtomicAdd ) && ( sparseImageFloat32Atomics == rhs.sparseImageFloat32Atomics ) &&
             ( sparseImageFloat32AtomicAdd == rhs.sparseImageFloat32AtomicAdd );
# endif
    }

    bool operator!=( PhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderAtomicFloatFeaturesEXT;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32Atomics = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32AtomicAdd = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64Atomics = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64AtomicAdd = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32Atomics = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32AtomicAdd = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64Atomics = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64AtomicAdd = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32Atomics = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32AtomicAdd = {};
    VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32Atomics = {};
    VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32AtomicAdd = {};
  };

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderAtomicFloatFeaturesEXT>
  {
    using Type = PhysicalDeviceShaderAtomicFloatFeaturesEXT;
  };

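  // Illustrative note (not part of the generated registry output): each setter returns *this, so the
  // struct can be filled fluently before being chained into device creation. A minimal sketch, assuming
  // the VK_EXT_shader_atomic_float extension is requested and `deviceCreateInfo` already describes the
  // queues to create:
  //
  //   vk::PhysicalDeviceShaderAtomicFloatFeaturesEXT atomicFloatFeatures;
  //   atomicFloatFeatures.setShaderBufferFloat32Atomics( VK_TRUE ).setShaderBufferFloat32AtomicAdd( VK_TRUE );
  //   deviceCreateInfo.setPNext( &atomicFloatFeatures );
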
  struct PhysicalDeviceShaderAtomicInt64Features
  {
    using NativeType = VkPhysicalDeviceShaderAtomicInt64Features;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderAtomicInt64Features;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicInt64Features( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_ = {},
                                                                  VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_ = {},
                                                                  void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , shaderBufferInt64Atomics( shaderBufferInt64Atomics_ )
      , shaderSharedInt64Atomics( shaderSharedInt64Atomics_ )
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicInt64Features( PhysicalDeviceShaderAtomicInt64Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceShaderAtomicInt64Features( VkPhysicalDeviceShaderAtomicInt64Features const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceShaderAtomicInt64Features( *reinterpret_cast<PhysicalDeviceShaderAtomicInt64Features const *>( &rhs ) )
    {
    }

    PhysicalDeviceShaderAtomicInt64Features & operator=( PhysicalDeviceShaderAtomicInt64Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceShaderAtomicInt64Features & operator=( VkPhysicalDeviceShaderAtomicInt64Features const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicInt64Features const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicInt64Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicInt64Features &
      setShaderBufferInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderBufferInt64Atomics = shaderBufferInt64Atomics_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicInt64Features &
      setShaderSharedInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderSharedInt64Atomics = shaderSharedInt64Atomics_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkPhysicalDeviceShaderAtomicInt64Features const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShaderAtomicInt64Features *>( this );
    }

    operator VkPhysicalDeviceShaderAtomicInt64Features &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShaderAtomicInt64Features *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, shaderBufferInt64Atomics, shaderSharedInt64Atomics );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceShaderAtomicInt64Features const & ) const = default;
#else
    bool operator==( PhysicalDeviceShaderAtomicInt64Features const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
# if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
# else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderBufferInt64Atomics == rhs.shaderBufferInt64Atomics ) &&
             ( shaderSharedInt64Atomics == rhs.shaderSharedInt64Atomics );
# endif
    }

    bool operator!=( PhysicalDeviceShaderAtomicInt64Features const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderAtomicInt64Features;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics = {};
  };

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderAtomicInt64Features>
  {
    using Type = PhysicalDeviceShaderAtomicInt64Features;
  };

  using PhysicalDeviceShaderAtomicInt64FeaturesKHR = PhysicalDeviceShaderAtomicInt64Features;

  struct PhysicalDeviceShaderClockFeaturesKHR
  {
    using NativeType = VkPhysicalDeviceShaderClockFeaturesKHR;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderClockFeaturesKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderClockFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupClock_ = {},
                                                               VULKAN_HPP_NAMESPACE::Bool32 shaderDeviceClock_ = {},
                                                               void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , shaderSubgroupClock( shaderSubgroupClock_ )
      , shaderDeviceClock( shaderDeviceClock_ )
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderClockFeaturesKHR( PhysicalDeviceShaderClockFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceShaderClockFeaturesKHR( VkPhysicalDeviceShaderClockFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceShaderClockFeaturesKHR( *reinterpret_cast<PhysicalDeviceShaderClockFeaturesKHR const *>( &rhs ) )
    {
    }

    PhysicalDeviceShaderClockFeaturesKHR & operator=( PhysicalDeviceShaderClockFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceShaderClockFeaturesKHR & operator=( VkPhysicalDeviceShaderClockFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderClockFeaturesKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderClockFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderClockFeaturesKHR &
      setShaderSubgroupClock( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupClock_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderSubgroupClock = shaderSubgroupClock_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderClockFeaturesKHR & setShaderDeviceClock( VULKAN_HPP_NAMESPACE::Bool32 shaderDeviceClock_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderDeviceClock = shaderDeviceClock_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkPhysicalDeviceShaderClockFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShaderClockFeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceShaderClockFeaturesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShaderClockFeaturesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, shaderSubgroupClock, shaderDeviceClock );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceShaderClockFeaturesKHR const & ) const = default;
#else
    bool operator==( PhysicalDeviceShaderClockFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
# if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
# else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderSubgroupClock == rhs.shaderSubgroupClock ) &&
             ( shaderDeviceClock == rhs.shaderDeviceClock );
# endif
    }

    bool operator!=( PhysicalDeviceShaderClockFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderClockFeaturesKHR;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupClock = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderDeviceClock = {};
  };

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderClockFeaturesKHR>
  {
    using Type = PhysicalDeviceShaderClockFeaturesKHR;
  };

  struct PhysicalDeviceShaderDemoteToHelperInvocationFeatures
  {
    using NativeType = VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderDemoteToHelperInvocationFeatures;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDemoteToHelperInvocationFeatures( VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation_ = {},
                                                                               void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , shaderDemoteToHelperInvocation( shaderDemoteToHelperInvocation_ )
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceShaderDemoteToHelperInvocationFeatures( PhysicalDeviceShaderDemoteToHelperInvocationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceShaderDemoteToHelperInvocationFeatures( VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceShaderDemoteToHelperInvocationFeatures( *reinterpret_cast<PhysicalDeviceShaderDemoteToHelperInvocationFeatures const *>( &rhs ) )
    {
    }

    PhysicalDeviceShaderDemoteToHelperInvocationFeatures &
      operator=( PhysicalDeviceShaderDemoteToHelperInvocationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceShaderDemoteToHelperInvocationFeatures & operator=( VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDemoteToHelperInvocationFeatures const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderDemoteToHelperInvocationFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderDemoteToHelperInvocationFeatures &
      setShaderDemoteToHelperInvocation( VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderDemoteToHelperInvocation = shaderDemoteToHelperInvocation_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures *>( this );
    }

    operator VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, shaderDemoteToHelperInvocation );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceShaderDemoteToHelperInvocationFeatures const & ) const = default;
#else
    bool operator==( PhysicalDeviceShaderDemoteToHelperInvocationFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
# if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
# else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderDemoteToHelperInvocation == rhs.shaderDemoteToHelperInvocation );
# endif
    }

    bool operator!=( PhysicalDeviceShaderDemoteToHelperInvocationFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderDemoteToHelperInvocationFeatures;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation = {};
  };

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderDemoteToHelperInvocationFeatures>
  {
    using Type = PhysicalDeviceShaderDemoteToHelperInvocationFeatures;
  };

  using PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT = PhysicalDeviceShaderDemoteToHelperInvocationFeatures;

  struct PhysicalDeviceShaderDrawParametersFeatures
  {
    using NativeType = VkPhysicalDeviceShaderDrawParametersFeatures;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderDrawParametersFeatures;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDrawParametersFeatures( VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ = {},
                                                                     void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , shaderDrawParameters( shaderDrawParameters_ )
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDrawParametersFeatures( PhysicalDeviceShaderDrawParametersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceShaderDrawParametersFeatures( VkPhysicalDeviceShaderDrawParametersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceShaderDrawParametersFeatures( *reinterpret_cast<PhysicalDeviceShaderDrawParametersFeatures const *>( &rhs ) )
    {
    }

    PhysicalDeviceShaderDrawParametersFeatures & operator=( PhysicalDeviceShaderDrawParametersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceShaderDrawParametersFeatures & operator=( VkPhysicalDeviceShaderDrawParametersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDrawParametersFeatures const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderDrawParametersFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderDrawParametersFeatures &
      setShaderDrawParameters( VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderDrawParameters = shaderDrawParameters_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkPhysicalDeviceShaderDrawParametersFeatures const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShaderDrawParametersFeatures *>( this );
    }

    operator VkPhysicalDeviceShaderDrawParametersFeatures &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShaderDrawParametersFeatures *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, shaderDrawParameters );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceShaderDrawParametersFeatures const & ) const = default;
#else
    bool operator==( PhysicalDeviceShaderDrawParametersFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
# if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
# else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderDrawParameters == rhs.shaderDrawParameters );
# endif
    }

    bool operator!=( PhysicalDeviceShaderDrawParametersFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderDrawParametersFeatures;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters = {};
  };

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderDrawParametersFeatures>
  {
    using Type = PhysicalDeviceShaderDrawParametersFeatures;
  };

  using PhysicalDeviceShaderDrawParameterFeatures = PhysicalDeviceShaderDrawParametersFeatures;

  struct PhysicalDeviceShaderFloat16Int8Features
  {
    using NativeType = VkPhysicalDeviceShaderFloat16Int8Features;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderFloat16Int8Features;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderFloat16Int8Features( VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16_ = {},
                                                                  VULKAN_HPP_NAMESPACE::Bool32 shaderInt8_ = {},
                                                                  void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , shaderFloat16( shaderFloat16_ )
      , shaderInt8( shaderInt8_ )
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderFloat16Int8Features( PhysicalDeviceShaderFloat16Int8Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceShaderFloat16Int8Features( VkPhysicalDeviceShaderFloat16Int8Features const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceShaderFloat16Int8Features( *reinterpret_cast<PhysicalDeviceShaderFloat16Int8Features const *>( &rhs ) )
    {
    }

    PhysicalDeviceShaderFloat16Int8Features & operator=( PhysicalDeviceShaderFloat16Int8Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceShaderFloat16Int8Features & operator=( VkPhysicalDeviceShaderFloat16Int8Features const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderFloat16Int8Features const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderFloat16Int8Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderFloat16Int8Features & setShaderFloat16( VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderFloat16 = shaderFloat16_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderFloat16Int8Features & setShaderInt8( VULKAN_HPP_NAMESPACE::Bool32 shaderInt8_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderInt8 = shaderInt8_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkPhysicalDeviceShaderFloat16Int8Features const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShaderFloat16Int8Features *>( this );
    }

    operator VkPhysicalDeviceShaderFloat16Int8Features &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShaderFloat16Int8Features *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, shaderFloat16, shaderInt8 );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceShaderFloat16Int8Features const & ) const = default;
#else
    bool operator==( PhysicalDeviceShaderFloat16Int8Features const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
# if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
# else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderFloat16 == rhs.shaderFloat16 ) && ( shaderInt8 == rhs.shaderInt8 );
# endif
    }

    bool operator!=( PhysicalDeviceShaderFloat16Int8Features const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderFloat16Int8Features;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16 = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderInt8 = {};
  };

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderFloat16Int8Features>
  {
    using Type = PhysicalDeviceShaderFloat16Int8Features;
  };

  using PhysicalDeviceFloat16Int8FeaturesKHR = PhysicalDeviceShaderFloat16Int8Features;
  using PhysicalDeviceShaderFloat16Int8FeaturesKHR = PhysicalDeviceShaderFloat16Int8Features;

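  // Illustrative note (not part of the generated registry output): a StructureChain keeps the pNext
  // links of a chain of structs consistent automatically, which is a convenient way to enable features
  // such as shaderFloat16 at device creation. A minimal sketch, assuming `queueCreateInfo` is a filled
  // vk::DeviceQueueCreateInfo and `physicalDevice` is a valid vk::PhysicalDevice:
  //
  //   vk::StructureChain<vk::DeviceCreateInfo, vk::PhysicalDeviceShaderFloat16Int8Features> chain(
  //     vk::DeviceCreateInfo{}.setQueueCreateInfos( queueCreateInfo ),
  //     vk::PhysicalDeviceShaderFloat16Int8Features{}.setShaderFloat16( VK_TRUE ) );
  //   vk::Device device = physicalDevice.createDevice( chain.get<vk::DeviceCreateInfo>() );
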
  struct PhysicalDeviceShaderImageAtomicInt64FeaturesEXT
  {
    using NativeType = VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderImageAtomicInt64FeaturesEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderImageAtomicInt64FeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 shaderImageInt64Atomics_ = {},
                                                                          VULKAN_HPP_NAMESPACE::Bool32 sparseImageInt64Atomics_ = {},
                                                                          void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , shaderImageInt64Atomics( shaderImageInt64Atomics_ )
      , sparseImageInt64Atomics( sparseImageInt64Atomics_ )
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceShaderImageAtomicInt64FeaturesEXT( PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceShaderImageAtomicInt64FeaturesEXT( VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceShaderImageAtomicInt64FeaturesEXT( *reinterpret_cast<PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceShaderImageAtomicInt64FeaturesEXT & operator=( PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceShaderImageAtomicInt64FeaturesEXT & operator=( VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderImageAtomicInt64FeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderImageAtomicInt64FeaturesEXT &
      setShaderImageInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderImageInt64Atomics_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderImageInt64Atomics = shaderImageInt64Atomics_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderImageAtomicInt64FeaturesEXT &
      setSparseImageInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 sparseImageInt64Atomics_ ) VULKAN_HPP_NOEXCEPT
    {
      sparseImageInt64Atomics = sparseImageInt64Atomics_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, shaderImageInt64Atomics, sparseImageInt64Atomics );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
# if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
# else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderImageInt64Atomics == rhs.shaderImageInt64Atomics ) &&
             ( sparseImageInt64Atomics == rhs.sparseImageInt64Atomics );
# endif
    }

    bool operator!=( PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderImageAtomicInt64FeaturesEXT;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderImageInt64Atomics = {};
    VULKAN_HPP_NAMESPACE::Bool32 sparseImageInt64Atomics = {};
  };

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderImageAtomicInt64FeaturesEXT>
  {
    using Type = PhysicalDeviceShaderImageAtomicInt64FeaturesEXT;
  };

  struct PhysicalDeviceShaderIntegerDotProductFeatures
  {
    using NativeType = VkPhysicalDeviceShaderIntegerDotProductFeatures;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderIntegerDotProductFeatures;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderIntegerDotProductFeatures( VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerDotProduct_ = {},
                                                                        void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , shaderIntegerDotProduct( shaderIntegerDotProduct_ )
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceShaderIntegerDotProductFeatures( PhysicalDeviceShaderIntegerDotProductFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceShaderIntegerDotProductFeatures( VkPhysicalDeviceShaderIntegerDotProductFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceShaderIntegerDotProductFeatures( *reinterpret_cast<PhysicalDeviceShaderIntegerDotProductFeatures const *>( &rhs ) )
    {
    }

    PhysicalDeviceShaderIntegerDotProductFeatures & operator=( PhysicalDeviceShaderIntegerDotProductFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceShaderIntegerDotProductFeatures & operator=( VkPhysicalDeviceShaderIntegerDotProductFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerDotProductFeatures const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerDotProductFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerDotProductFeatures &
      setShaderIntegerDotProduct( VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerDotProduct_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderIntegerDotProduct = shaderIntegerDotProduct_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkPhysicalDeviceShaderIntegerDotProductFeatures const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShaderIntegerDotProductFeatures *>( this );
    }

    operator VkPhysicalDeviceShaderIntegerDotProductFeatures &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShaderIntegerDotProductFeatures *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, shaderIntegerDotProduct );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceShaderIntegerDotProductFeatures const & ) const = default;
#else
    bool operator==( PhysicalDeviceShaderIntegerDotProductFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
# if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
# else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderIntegerDotProduct == rhs.shaderIntegerDotProduct );
# endif
    }

    bool operator!=( PhysicalDeviceShaderIntegerDotProductFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderIntegerDotProductFeatures;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerDotProduct = {};
  };

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderIntegerDotProductFeatures>
  {
    using Type = PhysicalDeviceShaderIntegerDotProductFeatures;
  };

  using PhysicalDeviceShaderIntegerDotProductFeaturesKHR = PhysicalDeviceShaderIntegerDotProductFeatures;

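  // Illustrative note (not part of the generated registry output): the CppType specializations emitted
  // after each struct map a StructureType enumerant back to the corresponding C++ type at compile time,
  // which generic code (for example StructureChain validation) can rely on. A minimal sketch:
  //
  //   static_assert( std::is_same<vk::CppType<vk::StructureType,
  //                                           vk::StructureType::ePhysicalDeviceShaderIntegerDotProductFeatures>::Type,
  //                               vk::PhysicalDeviceShaderIntegerDotProductFeatures>::value,
  //                  "CppType maps the enum value to its struct" );
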
  struct PhysicalDeviceShaderIntegerDotProductProperties
  {
    using NativeType = VkPhysicalDeviceShaderIntegerDotProductProperties;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderIntegerDotProductProperties;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderIntegerDotProductProperties(
      VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitUnsignedAccelerated_ = {},
      VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitSignedAccelerated_ = {},
      VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitMixedSignednessAccelerated_ = {},
      VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedUnsignedAccelerated_ = {},
      VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedSignedAccelerated_ = {},
      VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedMixedSignednessAccelerated_ = {},
      VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitUnsignedAccelerated_ = {},
      VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitSignedAccelerated_ = {},
      VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitMixedSignednessAccelerated_ = {},
      VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitUnsignedAccelerated_ = {},
      VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitSignedAccelerated_ = {},
      VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitMixedSignednessAccelerated_ = {},
      VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitUnsignedAccelerated_ = {},
      VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitSignedAccelerated_ = {},
      VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitMixedSignednessAccelerated_ = {},
      VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitUnsignedAccelerated_ = {},
      VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitSignedAccelerated_ = {},
      VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated_ = {},
      VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated_ = {},
      VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated_ = {},
      VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated_ = {},
      VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitUnsignedAccelerated_ = {},
      VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitSignedAccelerated_ = {},
      VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated_ = {},
      VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitUnsignedAccelerated_ = {},
      VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitSignedAccelerated_ = {},
      VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated_ = {},
      VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitUnsignedAccelerated_ = {},
      VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitSignedAccelerated_ = {},
      VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated_ = {},
      void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , integerDotProduct8BitUnsignedAccelerated( integerDotProduct8BitUnsignedAccelerated_ )
      , integerDotProduct8BitSignedAccelerated( integerDotProduct8BitSignedAccelerated_ )
      , integerDotProduct8BitMixedSignednessAccelerated( integerDotProduct8BitMixedSignednessAccelerated_ )
      , integerDotProduct4x8BitPackedUnsignedAccelerated( integerDotProduct4x8BitPackedUnsignedAccelerated_ )
      , integerDotProduct4x8BitPackedSignedAccelerated( integerDotProduct4x8BitPackedSignedAccelerated_ )
      , integerDotProduct4x8BitPackedMixedSignednessAccelerated( integerDotProduct4x8BitPackedMixedSignednessAccelerated_ )
      , integerDotProduct16BitUnsignedAccelerated( integerDotProduct16BitUnsignedAccelerated_ )
      , integerDotProduct16BitSignedAccelerated( integerDotProduct16BitSignedAccelerated_ )
      , integerDotProduct16BitMixedSignednessAccelerated( integerDotProduct16BitMixedSignednessAccelerated_ )
      , integerDotProduct32BitUnsignedAccelerated( integerDotProduct32BitUnsignedAccelerated_ )
      , integerDotProduct32BitSignedAccelerated( integerDotProduct32BitSignedAccelerated_ )
      , integerDotProduct32BitMixedSignednessAccelerated( integerDotProduct32BitMixedSignednessAccelerated_ )
      , integerDotProduct64BitUnsignedAccelerated( integerDotProduct64BitUnsignedAccelerated_ )
      , integerDotProduct64BitSignedAccelerated( integerDotProduct64BitSignedAccelerated_ )
      , integerDotProduct64BitMixedSignednessAccelerated( integerDotProduct64BitMixedSignednessAccelerated_ )
      , integerDotProductAccumulatingSaturating8BitUnsignedAccelerated( integerDotProductAccumulatingSaturating8BitUnsignedAccelerated_ )
      , integerDotProductAccumulatingSaturating8BitSignedAccelerated( integerDotProductAccumulatingSaturating8BitSignedAccelerated_ )
      , integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated( integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated_ )
      , integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated( integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated_ )
      , integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated( integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated_ )
      , integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated(
          integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated_ )
      , integerDotProductAccumulatingSaturating16BitUnsignedAccelerated( integerDotProductAccumulatingSaturating16BitUnsignedAccelerated_ )
      , integerDotProductAccumulatingSaturating16BitSignedAccelerated( integerDotProductAccumulatingSaturating16BitSignedAccelerated_ )
      , integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated( integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated_ )
      , integerDotProductAccumulatingSaturating32BitUnsignedAccelerated( integerDotProductAccumulatingSaturating32BitUnsignedAccelerated_ )
      , integerDotProductAccumulatingSaturating32BitSignedAccelerated( integerDotProductAccumulatingSaturating32BitSignedAccelerated_ )
      , integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated( integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated_ )
      , integerDotProductAccumulatingSaturating64BitUnsignedAccelerated( integerDotProductAccumulatingSaturating64BitUnsignedAccelerated_ )
      , integerDotProductAccumulatingSaturating64BitSignedAccelerated( integerDotProductAccumulatingSaturating64BitSignedAccelerated_ )
      , integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated( integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated_ )
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceShaderIntegerDotProductProperties( PhysicalDeviceShaderIntegerDotProductProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceShaderIntegerDotProductProperties( VkPhysicalDeviceShaderIntegerDotProductProperties const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceShaderIntegerDotProductProperties( *reinterpret_cast<PhysicalDeviceShaderIntegerDotProductProperties const *>( &rhs ) )
    {
    }

    PhysicalDeviceShaderIntegerDotProductProperties & operator=( PhysicalDeviceShaderIntegerDotProductProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceShaderIntegerDotProductProperties & operator=( VkPhysicalDeviceShaderIntegerDotProductProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerDotProductProperties const *>( &rhs );
      return *this;
    }

    operator VkPhysicalDeviceShaderIntegerDotProductProperties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShaderIntegerDotProductProperties *>( this );
    }

    operator VkPhysicalDeviceShaderIntegerDotProductProperties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShaderIntegerDotProductProperties *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
               void * const &,
               VULKAN_HPP_NAMESPACE::Bool32 const &,
               VULKAN_HPP_NAMESPACE::Bool32 const &,
               VULKAN_HPP_NAMESPACE::Bool32 const &,
               VULKAN_HPP_NAMESPACE::Bool32 const &,
               VULKAN_HPP_NAMESPACE::Bool32 const &,
               VULKAN_HPP_NAMESPACE::Bool32 const &,
               VULKAN_HPP_NAMESPACE::Bool32 const &,
               VULKAN_HPP_NAMESPACE::Bool32 const &,
               VULKAN_HPP_NAMESPACE::Bool32 const &,
               VULKAN_HPP_NAMESPACE::Bool32 const &,
               VULKAN_HPP_NAMESPACE::Bool32 const &,
               VULKAN_HPP_NAMESPACE::Bool32 const &,
               VULKAN_HPP_NAMESPACE::Bool32 const &,
               VULKAN_HPP_NAMESPACE::Bool32 const &,
               VULKAN_HPP_NAMESPACE::Bool32 const &,
               VULKAN_HPP_NAMESPACE::Bool32 const &,
               VULKAN_HPP_NAMESPACE::Bool32 const &,
               VULKAN_HPP_NAMESPACE::Bool32 const &,
               VULKAN_HPP_NAMESPACE::Bool32 const &,
               VULKAN_HPP_NAMESPACE::Bool32 const &,
               VULKAN_HPP_NAMESPACE::Bool32 const &,
               VULKAN_HPP_NAMESPACE::Bool32 const &,
               VULKAN_HPP_NAMESPACE::Bool32 const &,
               VULKAN_HPP_NAMESPACE::Bool32 const &,
               VULKAN_HPP_NAMESPACE::Bool32 const &,
               VULKAN_HPP_NAMESPACE::Bool32 const &,
               VULKAN_HPP_NAMESPACE::Bool32 const &,
               VULKAN_HPP_NAMESPACE::Bool32 const &,
               VULKAN_HPP_NAMESPACE::Bool32 const &>
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType,
|
|
pNext,
|
|
integerDotProduct8BitUnsignedAccelerated,
|
|
integerDotProduct8BitSignedAccelerated,
|
|
integerDotProduct8BitMixedSignednessAccelerated,
|
|
integerDotProduct4x8BitPackedUnsignedAccelerated,
|
|
integerDotProduct4x8BitPackedSignedAccelerated,
|
|
integerDotProduct4x8BitPackedMixedSignednessAccelerated,
|
|
integerDotProduct16BitUnsignedAccelerated,
|
|
integerDotProduct16BitSignedAccelerated,
|
|
integerDotProduct16BitMixedSignednessAccelerated,
|
|
integerDotProduct32BitUnsignedAccelerated,
|
|
integerDotProduct32BitSignedAccelerated,
|
|
integerDotProduct32BitMixedSignednessAccelerated,
|
|
integerDotProduct64BitUnsignedAccelerated,
|
|
integerDotProduct64BitSignedAccelerated,
|
|
integerDotProduct64BitMixedSignednessAccelerated,
|
|
integerDotProductAccumulatingSaturating8BitUnsignedAccelerated,
|
|
integerDotProductAccumulatingSaturating8BitSignedAccelerated,
|
|
integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated,
|
|
integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated,
|
|
integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated,
|
|
integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated,
|
|
integerDotProductAccumulatingSaturating16BitUnsignedAccelerated,
|
|
integerDotProductAccumulatingSaturating16BitSignedAccelerated,
|
|
integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated,
|
|
integerDotProductAccumulatingSaturating32BitUnsignedAccelerated,
|
|
integerDotProductAccumulatingSaturating32BitSignedAccelerated,
|
|
integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated,
|
|
integerDotProductAccumulatingSaturating64BitUnsignedAccelerated,
|
|
integerDotProductAccumulatingSaturating64BitSignedAccelerated,
|
|
integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( PhysicalDeviceShaderIntegerDotProductProperties const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceShaderIntegerDotProductProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( integerDotProduct8BitUnsignedAccelerated == rhs.integerDotProduct8BitUnsignedAccelerated ) &&
|
|
( integerDotProduct8BitSignedAccelerated == rhs.integerDotProduct8BitSignedAccelerated ) &&
|
|
( integerDotProduct8BitMixedSignednessAccelerated == rhs.integerDotProduct8BitMixedSignednessAccelerated ) &&
|
|
( integerDotProduct4x8BitPackedUnsignedAccelerated == rhs.integerDotProduct4x8BitPackedUnsignedAccelerated ) &&
|
|
( integerDotProduct4x8BitPackedSignedAccelerated == rhs.integerDotProduct4x8BitPackedSignedAccelerated ) &&
|
|
( integerDotProduct4x8BitPackedMixedSignednessAccelerated == rhs.integerDotProduct4x8BitPackedMixedSignednessAccelerated ) &&
|
|
( integerDotProduct16BitUnsignedAccelerated == rhs.integerDotProduct16BitUnsignedAccelerated ) &&
|
|
( integerDotProduct16BitSignedAccelerated == rhs.integerDotProduct16BitSignedAccelerated ) &&
|
|
( integerDotProduct16BitMixedSignednessAccelerated == rhs.integerDotProduct16BitMixedSignednessAccelerated ) &&
|
|
( integerDotProduct32BitUnsignedAccelerated == rhs.integerDotProduct32BitUnsignedAccelerated ) &&
|
|
( integerDotProduct32BitSignedAccelerated == rhs.integerDotProduct32BitSignedAccelerated ) &&
|
|
( integerDotProduct32BitMixedSignednessAccelerated == rhs.integerDotProduct32BitMixedSignednessAccelerated ) &&
|
|
( integerDotProduct64BitUnsignedAccelerated == rhs.integerDotProduct64BitUnsignedAccelerated ) &&
|
|
( integerDotProduct64BitSignedAccelerated == rhs.integerDotProduct64BitSignedAccelerated ) &&
|
|
( integerDotProduct64BitMixedSignednessAccelerated == rhs.integerDotProduct64BitMixedSignednessAccelerated ) &&
|
|
( integerDotProductAccumulatingSaturating8BitUnsignedAccelerated == rhs.integerDotProductAccumulatingSaturating8BitUnsignedAccelerated ) &&
|
|
( integerDotProductAccumulatingSaturating8BitSignedAccelerated == rhs.integerDotProductAccumulatingSaturating8BitSignedAccelerated ) &&
|
|
( integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated ==
|
|
rhs.integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated ) &&
|
|
( integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated ==
|
|
rhs.integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated ) &&
|
|
( integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated ==
|
|
rhs.integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated ) &&
|
|
( integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated ==
|
|
rhs.integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated ) &&
|
|
( integerDotProductAccumulatingSaturating16BitUnsignedAccelerated == rhs.integerDotProductAccumulatingSaturating16BitUnsignedAccelerated ) &&
|
|
( integerDotProductAccumulatingSaturating16BitSignedAccelerated == rhs.integerDotProductAccumulatingSaturating16BitSignedAccelerated ) &&
|
|
( integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated ==
|
|
rhs.integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated ) &&
|
|
( integerDotProductAccumulatingSaturating32BitUnsignedAccelerated == rhs.integerDotProductAccumulatingSaturating32BitUnsignedAccelerated ) &&
|
|
( integerDotProductAccumulatingSaturating32BitSignedAccelerated == rhs.integerDotProductAccumulatingSaturating32BitSignedAccelerated ) &&
|
|
( integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated ==
|
|
rhs.integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated ) &&
|
|
( integerDotProductAccumulatingSaturating64BitUnsignedAccelerated == rhs.integerDotProductAccumulatingSaturating64BitUnsignedAccelerated ) &&
|
|
( integerDotProductAccumulatingSaturating64BitSignedAccelerated == rhs.integerDotProductAccumulatingSaturating64BitSignedAccelerated ) &&
|
|
( integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated ==
|
|
rhs.integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceShaderIntegerDotProductProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderIntegerDotProductProperties;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitUnsignedAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitSignedAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitMixedSignednessAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedUnsignedAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedSignedAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedMixedSignednessAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitUnsignedAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitSignedAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitMixedSignednessAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitUnsignedAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitSignedAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitMixedSignednessAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitUnsignedAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitSignedAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitMixedSignednessAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitUnsignedAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitSignedAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitUnsignedAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitSignedAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitUnsignedAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitSignedAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitUnsignedAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitSignedAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceShaderIntegerDotProductProperties>
|
|
{
|
|
using Type = PhysicalDeviceShaderIntegerDotProductProperties;
|
|
};
|
|
|
|
using PhysicalDeviceShaderIntegerDotProductPropertiesKHR = PhysicalDeviceShaderIntegerDotProductProperties;
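
  // Illustrative sketch of how these properties are typically read (assumes a valid
  // VULKAN_HPP_NAMESPACE::PhysicalDevice named physicalDevice with Vulkan 1.3 or
  // VK_KHR_shader_integer_dot_product available; names below are examples only):
  //
  //   auto chain = physicalDevice.getProperties2<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2,
  //                                              VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerDotProductProperties>();
  //   auto const & dotProductProperties = chain.get<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerDotProductProperties>();
  //   bool packedDp4aAccelerated = dotProductProperties.integerDotProduct4x8BitPackedSignedAccelerated == VK_TRUE;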
|
|
|
|
  struct PhysicalDeviceShaderSubgroupExtendedTypesFeatures
  {
    using NativeType = VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderSubgroupExtendedTypesFeatures;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSubgroupExtendedTypesFeatures( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes_ = {},
                                                                            void *                       pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , shaderSubgroupExtendedTypes( shaderSubgroupExtendedTypes_ )
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceShaderSubgroupExtendedTypesFeatures( PhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceShaderSubgroupExtendedTypesFeatures( VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceShaderSubgroupExtendedTypesFeatures( *reinterpret_cast<PhysicalDeviceShaderSubgroupExtendedTypesFeatures const *>( &rhs ) )
    {
    }

    PhysicalDeviceShaderSubgroupExtendedTypesFeatures &
      operator=( PhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceShaderSubgroupExtendedTypesFeatures & operator=( VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupExtendedTypesFeatures const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSubgroupExtendedTypesFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSubgroupExtendedTypesFeatures &
      setShaderSubgroupExtendedTypes( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderSubgroupExtendedTypes = shaderSubgroupExtendedTypes_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures *>( this );
    }

    operator VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#  if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#  else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#  endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, shaderSubgroupExtendedTypes );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceShaderSubgroupExtendedTypesFeatures const & ) const = default;
#else
    bool operator==( PhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderSubgroupExtendedTypes == rhs.shaderSubgroupExtendedTypes );
#  endif
    }

    bool operator!=( PhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType                       = StructureType::ePhysicalDeviceShaderSubgroupExtendedTypesFeatures;
    void *                              pNext                       = {};
    VULKAN_HPP_NAMESPACE::Bool32        shaderSubgroupExtendedTypes = {};
  };

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderSubgroupExtendedTypesFeatures>
  {
    using Type = PhysicalDeviceShaderSubgroupExtendedTypesFeatures;
  };

  using PhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR = PhysicalDeviceShaderSubgroupExtendedTypesFeatures;

struct PhysicalDeviceShaderTerminateInvocationFeatures
|
|
{
|
|
using NativeType = VkPhysicalDeviceShaderTerminateInvocationFeatures;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderTerminateInvocationFeatures;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderTerminateInvocationFeatures( VULKAN_HPP_NAMESPACE::Bool32 shaderTerminateInvocation_ = {},
|
|
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, shaderTerminateInvocation( shaderTerminateInvocation_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR
|
|
PhysicalDeviceShaderTerminateInvocationFeatures( PhysicalDeviceShaderTerminateInvocationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceShaderTerminateInvocationFeatures( VkPhysicalDeviceShaderTerminateInvocationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceShaderTerminateInvocationFeatures( *reinterpret_cast<PhysicalDeviceShaderTerminateInvocationFeatures const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
PhysicalDeviceShaderTerminateInvocationFeatures & operator=( PhysicalDeviceShaderTerminateInvocationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceShaderTerminateInvocationFeatures & operator=( VkPhysicalDeviceShaderTerminateInvocationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTerminateInvocationFeatures const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderTerminateInvocationFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderTerminateInvocationFeatures &
|
|
setShaderTerminateInvocation( VULKAN_HPP_NAMESPACE::Bool32 shaderTerminateInvocation_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shaderTerminateInvocation = shaderTerminateInvocation_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkPhysicalDeviceShaderTerminateInvocationFeatures const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceShaderTerminateInvocationFeatures *>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceShaderTerminateInvocationFeatures &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceShaderTerminateInvocationFeatures *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, shaderTerminateInvocation );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( PhysicalDeviceShaderTerminateInvocationFeatures const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceShaderTerminateInvocationFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderTerminateInvocation == rhs.shaderTerminateInvocation );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceShaderTerminateInvocationFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderTerminateInvocationFeatures;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderTerminateInvocation = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceShaderTerminateInvocationFeatures>
|
|
{
|
|
using Type = PhysicalDeviceShaderTerminateInvocationFeatures;
|
|
};
|
|
|
|
using PhysicalDeviceShaderTerminateInvocationFeaturesKHR = PhysicalDeviceShaderTerminateInvocationFeatures;
|
|
|
|
  struct PhysicalDeviceSubgroupProperties
  {
    using NativeType = VkPhysicalDeviceSubgroupProperties;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSubgroupProperties;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupProperties( uint32_t                                   subgroupSize_              = {},
                                                           VULKAN_HPP_NAMESPACE::ShaderStageFlags     supportedStages_           = {},
                                                           VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags supportedOperations_       = {},
                                                           VULKAN_HPP_NAMESPACE::Bool32               quadOperationsInAllStages_ = {},
                                                           void *                                     pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , subgroupSize( subgroupSize_ )
      , supportedStages( supportedStages_ )
      , supportedOperations( supportedOperations_ )
      , quadOperationsInAllStages( quadOperationsInAllStages_ )
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupProperties( PhysicalDeviceSubgroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceSubgroupProperties( VkPhysicalDeviceSubgroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceSubgroupProperties( *reinterpret_cast<PhysicalDeviceSubgroupProperties const *>( &rhs ) )
    {
    }

    PhysicalDeviceSubgroupProperties & operator=( PhysicalDeviceSubgroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceSubgroupProperties & operator=( VkPhysicalDeviceSubgroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupProperties const *>( &rhs );
      return *this;
    }

    operator VkPhysicalDeviceSubgroupProperties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceSubgroupProperties *>( this );
    }

    operator VkPhysicalDeviceSubgroupProperties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceSubgroupProperties *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#  if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#  else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
               void * const &,
               uint32_t const &,
               VULKAN_HPP_NAMESPACE::ShaderStageFlags const &,
               VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags const &,
               VULKAN_HPP_NAMESPACE::Bool32 const &>
#  endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, subgroupSize, supportedStages, supportedOperations, quadOperationsInAllStages );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceSubgroupProperties const & ) const = default;
#else
    bool operator==( PhysicalDeviceSubgroupProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( subgroupSize == rhs.subgroupSize ) && ( supportedStages == rhs.supportedStages ) &&
             ( supportedOperations == rhs.supportedOperations ) && ( quadOperationsInAllStages == rhs.quadOperationsInAllStages );
#  endif
    }

    bool operator!=( PhysicalDeviceSubgroupProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType        sType                     = StructureType::ePhysicalDeviceSubgroupProperties;
    void *                                     pNext                     = {};
    uint32_t                                   subgroupSize              = {};
    VULKAN_HPP_NAMESPACE::ShaderStageFlags     supportedStages           = {};
    VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags supportedOperations       = {};
    VULKAN_HPP_NAMESPACE::Bool32               quadOperationsInAllStages = {};
  };

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceSubgroupProperties>
  {
    using Type = PhysicalDeviceSubgroupProperties;
  };

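  // Illustrative sketch of reading these core Vulkan 1.1 subgroup properties (assumes a valid
  // VULKAN_HPP_NAMESPACE::PhysicalDevice named physicalDevice; names are examples only):
  //
  //   auto chain = physicalDevice.getProperties2<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2,
  //                                              VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupProperties>();
  //   auto const & subgroupProperties = chain.get<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupProperties>();
  //   uint32_t subgroupSize = subgroupProperties.subgroupSize;
  //   bool ballotSupported  = static_cast<bool>( subgroupProperties.supportedOperations & VULKAN_HPP_NAMESPACE::SubgroupFeatureFlagBits::eBallot );
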
struct PhysicalDeviceSubgroupSizeControlFeatures
|
|
{
|
|
using NativeType = VkPhysicalDeviceSubgroupSizeControlFeatures;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSubgroupSizeControlFeatures;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlFeatures( VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups_ = {},
|
|
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, subgroupSizeControl( subgroupSizeControl_ )
|
|
, computeFullSubgroups( computeFullSubgroups_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlFeatures( PhysicalDeviceSubgroupSizeControlFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceSubgroupSizeControlFeatures( VkPhysicalDeviceSubgroupSizeControlFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceSubgroupSizeControlFeatures( *reinterpret_cast<PhysicalDeviceSubgroupSizeControlFeatures const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
PhysicalDeviceSubgroupSizeControlFeatures & operator=( PhysicalDeviceSubgroupSizeControlFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceSubgroupSizeControlFeatures & operator=( VkPhysicalDeviceSubgroupSizeControlFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlFeatures const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSubgroupSizeControlFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSubgroupSizeControlFeatures &
|
|
setSubgroupSizeControl( VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
subgroupSizeControl = subgroupSizeControl_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSubgroupSizeControlFeatures &
|
|
setComputeFullSubgroups( VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
computeFullSubgroups = computeFullSubgroups_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkPhysicalDeviceSubgroupSizeControlFeatures const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceSubgroupSizeControlFeatures *>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceSubgroupSizeControlFeatures &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceSubgroupSizeControlFeatures *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, subgroupSizeControl, computeFullSubgroups );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( PhysicalDeviceSubgroupSizeControlFeatures const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceSubgroupSizeControlFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( subgroupSizeControl == rhs.subgroupSizeControl ) &&
|
|
( computeFullSubgroups == rhs.computeFullSubgroups );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceSubgroupSizeControlFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSubgroupSizeControlFeatures;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceSubgroupSizeControlFeatures>
|
|
{
|
|
using Type = PhysicalDeviceSubgroupSizeControlFeatures;
|
|
};
|
|
|
|
using PhysicalDeviceSubgroupSizeControlFeaturesEXT = PhysicalDeviceSubgroupSizeControlFeatures;
|
|
|
|
struct PhysicalDeviceSubgroupSizeControlProperties
|
|
{
|
|
using NativeType = VkPhysicalDeviceSubgroupSizeControlProperties;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSubgroupSizeControlProperties;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlProperties( uint32_t minSubgroupSize_ = {},
|
|
uint32_t maxSubgroupSize_ = {},
|
|
uint32_t maxComputeWorkgroupSubgroups_ = {},
|
|
VULKAN_HPP_NAMESPACE::ShaderStageFlags requiredSubgroupSizeStages_ = {},
|
|
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, minSubgroupSize( minSubgroupSize_ )
|
|
, maxSubgroupSize( maxSubgroupSize_ )
|
|
, maxComputeWorkgroupSubgroups( maxComputeWorkgroupSubgroups_ )
|
|
, requiredSubgroupSizeStages( requiredSubgroupSizeStages_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlProperties( PhysicalDeviceSubgroupSizeControlProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceSubgroupSizeControlProperties( VkPhysicalDeviceSubgroupSizeControlProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceSubgroupSizeControlProperties( *reinterpret_cast<PhysicalDeviceSubgroupSizeControlProperties const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
PhysicalDeviceSubgroupSizeControlProperties & operator=( PhysicalDeviceSubgroupSizeControlProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceSubgroupSizeControlProperties & operator=( VkPhysicalDeviceSubgroupSizeControlProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlProperties const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
operator VkPhysicalDeviceSubgroupSizeControlProperties const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceSubgroupSizeControlProperties *>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceSubgroupSizeControlProperties &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceSubgroupSizeControlProperties *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
void * const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
VULKAN_HPP_NAMESPACE::ShaderStageFlags const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, minSubgroupSize, maxSubgroupSize, maxComputeWorkgroupSubgroups, requiredSubgroupSizeStages );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( PhysicalDeviceSubgroupSizeControlProperties const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceSubgroupSizeControlProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minSubgroupSize == rhs.minSubgroupSize ) && ( maxSubgroupSize == rhs.maxSubgroupSize ) &&
|
|
( maxComputeWorkgroupSubgroups == rhs.maxComputeWorkgroupSubgroups ) && ( requiredSubgroupSizeStages == rhs.requiredSubgroupSizeStages );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceSubgroupSizeControlProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSubgroupSizeControlProperties;
|
|
void * pNext = {};
|
|
uint32_t minSubgroupSize = {};
|
|
uint32_t maxSubgroupSize = {};
|
|
uint32_t maxComputeWorkgroupSubgroups = {};
|
|
VULKAN_HPP_NAMESPACE::ShaderStageFlags requiredSubgroupSizeStages = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceSubgroupSizeControlProperties>
|
|
{
|
|
using Type = PhysicalDeviceSubgroupSizeControlProperties;
|
|
};
|
|
|
|
using PhysicalDeviceSubgroupSizeControlPropertiesEXT = PhysicalDeviceSubgroupSizeControlProperties;
|
|
|
|
  struct PhysicalDeviceSurfaceInfo2KHR
  {
    using NativeType = VkPhysicalDeviceSurfaceInfo2KHR;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSurfaceInfo2KHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceSurfaceInfo2KHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , surface( surface_ )
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceSurfaceInfo2KHR( PhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceSurfaceInfo2KHR( VkPhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceSurfaceInfo2KHR( *reinterpret_cast<PhysicalDeviceSurfaceInfo2KHR const *>( &rhs ) )
    {
    }

    PhysicalDeviceSurfaceInfo2KHR & operator=( PhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceSurfaceInfo2KHR & operator=( VkPhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSurfaceInfo2KHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSurfaceInfo2KHR & setSurface( VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ ) VULKAN_HPP_NOEXCEPT
    {
      surface = surface_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkPhysicalDeviceSurfaceInfo2KHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( this );
    }

    operator VkPhysicalDeviceSurfaceInfo2KHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceSurfaceInfo2KHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#  if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#  else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::SurfaceKHR const &>
#  endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, surface );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceSurfaceInfo2KHR const & ) const = default;
#else
    bool operator==( PhysicalDeviceSurfaceInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( surface == rhs.surface );
#  endif
    }

    bool operator!=( PhysicalDeviceSurfaceInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType   = StructureType::ePhysicalDeviceSurfaceInfo2KHR;
    const void *                        pNext   = {};
    VULKAN_HPP_NAMESPACE::SurfaceKHR    surface = {};
  };

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceSurfaceInfo2KHR>
  {
    using Type = PhysicalDeviceSurfaceInfo2KHR;
  };

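  // Illustrative sketch (assumes a valid PhysicalDevice named physicalDevice and a SurfaceKHR named
  // surface): this struct bundles the surface, plus any pNext extensions, for the
  // VK_KHR_get_surface_capabilities2 family of queries.
  //
  //   VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR surfaceInfo( surface );
  //   auto surfaceCapabilities = physicalDevice.getSurfaceCapabilities2KHR( surfaceInfo );
  //   auto surfaceFormats      = physicalDevice.getSurfaceFormats2KHR( surfaceInfo );
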
  struct PhysicalDeviceSynchronization2Features
  {
    using NativeType = VkPhysicalDeviceSynchronization2Features;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSynchronization2Features;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceSynchronization2Features( VULKAN_HPP_NAMESPACE::Bool32 synchronization2_ = {},
                                                                 void *                       pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , synchronization2( synchronization2_ )
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceSynchronization2Features( PhysicalDeviceSynchronization2Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceSynchronization2Features( VkPhysicalDeviceSynchronization2Features const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceSynchronization2Features( *reinterpret_cast<PhysicalDeviceSynchronization2Features const *>( &rhs ) )
    {
    }

    PhysicalDeviceSynchronization2Features & operator=( PhysicalDeviceSynchronization2Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceSynchronization2Features & operator=( VkPhysicalDeviceSynchronization2Features const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSynchronization2Features const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSynchronization2Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSynchronization2Features & setSynchronization2( VULKAN_HPP_NAMESPACE::Bool32 synchronization2_ ) VULKAN_HPP_NOEXCEPT
    {
      synchronization2 = synchronization2_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkPhysicalDeviceSynchronization2Features const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceSynchronization2Features *>( this );
    }

    operator VkPhysicalDeviceSynchronization2Features &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceSynchronization2Features *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#  if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#  else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#  endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, synchronization2 );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceSynchronization2Features const & ) const = default;
#else
    bool operator==( PhysicalDeviceSynchronization2Features const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( synchronization2 == rhs.synchronization2 );
#  endif
    }

    bool operator!=( PhysicalDeviceSynchronization2Features const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType            = StructureType::ePhysicalDeviceSynchronization2Features;
    void *                              pNext            = {};
    VULKAN_HPP_NAMESPACE::Bool32        synchronization2 = {};
  };

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceSynchronization2Features>
  {
    using Type = PhysicalDeviceSynchronization2Features;
  };

  using PhysicalDeviceSynchronization2FeaturesKHR = PhysicalDeviceSynchronization2Features;

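  // Illustrative sketch of enabling synchronization2 at device creation (assumes a filled
  // VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo named queueCreateInfo and a valid PhysicalDevice
  // named physicalDevice; names are examples only):
  //
  //   VULKAN_HPP_NAMESPACE::PhysicalDeviceSynchronization2Features synchronization2Features( VK_TRUE );
  //   VULKAN_HPP_NAMESPACE::DeviceCreateInfo                       deviceCreateInfo;
  //   deviceCreateInfo.setQueueCreateInfos( queueCreateInfo ).setPNext( &synchronization2Features );
  //   VULKAN_HPP_NAMESPACE::Device device = physicalDevice.createDevice( deviceCreateInfo );
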
struct PhysicalDeviceTexelBufferAlignmentFeaturesEXT
|
|
{
|
|
using NativeType = VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTexelBufferAlignmentFeaturesEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceTexelBufferAlignmentFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 texelBufferAlignment_ = {},
|
|
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, texelBufferAlignment( texelBufferAlignment_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR
|
|
PhysicalDeviceTexelBufferAlignmentFeaturesEXT( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceTexelBufferAlignmentFeaturesEXT( VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceTexelBufferAlignmentFeaturesEXT( *reinterpret_cast<PhysicalDeviceTexelBufferAlignmentFeaturesEXT const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
PhysicalDeviceTexelBufferAlignmentFeaturesEXT & operator=( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceTexelBufferAlignmentFeaturesEXT & operator=( VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentFeaturesEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTexelBufferAlignmentFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTexelBufferAlignmentFeaturesEXT &
|
|
setTexelBufferAlignment( VULKAN_HPP_NAMESPACE::Bool32 texelBufferAlignment_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
texelBufferAlignment = texelBufferAlignment_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT *>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, texelBufferAlignment );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( texelBufferAlignment == rhs.texelBufferAlignment );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTexelBufferAlignmentFeaturesEXT;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 texelBufferAlignment = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceTexelBufferAlignmentFeaturesEXT>
|
|
{
|
|
using Type = PhysicalDeviceTexelBufferAlignmentFeaturesEXT;
|
|
};
|
|
|
|
struct PhysicalDeviceTexelBufferAlignmentProperties
|
|
{
|
|
using NativeType = VkPhysicalDeviceTexelBufferAlignmentProperties;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTexelBufferAlignmentProperties;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceTexelBufferAlignmentProperties( VULKAN_HPP_NAMESPACE::DeviceSize storageTexelBufferOffsetAlignmentBytes_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 storageTexelBufferOffsetSingleTexelAlignment_ = {},
|
|
VULKAN_HPP_NAMESPACE::DeviceSize uniformTexelBufferOffsetAlignmentBytes_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 uniformTexelBufferOffsetSingleTexelAlignment_ = {},
|
|
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, storageTexelBufferOffsetAlignmentBytes( storageTexelBufferOffsetAlignmentBytes_ )
|
|
, storageTexelBufferOffsetSingleTexelAlignment( storageTexelBufferOffsetSingleTexelAlignment_ )
|
|
, uniformTexelBufferOffsetAlignmentBytes( uniformTexelBufferOffsetAlignmentBytes_ )
|
|
, uniformTexelBufferOffsetSingleTexelAlignment( uniformTexelBufferOffsetSingleTexelAlignment_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceTexelBufferAlignmentProperties( PhysicalDeviceTexelBufferAlignmentProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceTexelBufferAlignmentProperties( VkPhysicalDeviceTexelBufferAlignmentProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceTexelBufferAlignmentProperties( *reinterpret_cast<PhysicalDeviceTexelBufferAlignmentProperties const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
PhysicalDeviceTexelBufferAlignmentProperties & operator=( PhysicalDeviceTexelBufferAlignmentProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceTexelBufferAlignmentProperties & operator=( VkPhysicalDeviceTexelBufferAlignmentProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentProperties const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
operator VkPhysicalDeviceTexelBufferAlignmentProperties const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceTexelBufferAlignmentProperties *>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceTexelBufferAlignmentProperties &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceTexelBufferAlignmentProperties *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
void * const &,
|
|
VULKAN_HPP_NAMESPACE::DeviceSize const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::DeviceSize const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType,
|
|
pNext,
|
|
storageTexelBufferOffsetAlignmentBytes,
|
|
storageTexelBufferOffsetSingleTexelAlignment,
|
|
uniformTexelBufferOffsetAlignmentBytes,
|
|
uniformTexelBufferOffsetSingleTexelAlignment );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( PhysicalDeviceTexelBufferAlignmentProperties const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceTexelBufferAlignmentProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( storageTexelBufferOffsetAlignmentBytes == rhs.storageTexelBufferOffsetAlignmentBytes ) &&
|
|
( storageTexelBufferOffsetSingleTexelAlignment == rhs.storageTexelBufferOffsetSingleTexelAlignment ) &&
|
|
( uniformTexelBufferOffsetAlignmentBytes == rhs.uniformTexelBufferOffsetAlignmentBytes ) &&
|
|
( uniformTexelBufferOffsetSingleTexelAlignment == rhs.uniformTexelBufferOffsetSingleTexelAlignment );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceTexelBufferAlignmentProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTexelBufferAlignmentProperties;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize storageTexelBufferOffsetAlignmentBytes = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 storageTexelBufferOffsetSingleTexelAlignment = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize uniformTexelBufferOffsetAlignmentBytes = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 uniformTexelBufferOffsetSingleTexelAlignment = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceTexelBufferAlignmentProperties>
|
|
{
|
|
using Type = PhysicalDeviceTexelBufferAlignmentProperties;
|
|
};
|
|
|
|
using PhysicalDeviceTexelBufferAlignmentPropertiesEXT = PhysicalDeviceTexelBufferAlignmentProperties;
|
|
|
|
struct PhysicalDeviceTextureCompressionASTCHDRFeatures
|
|
{
|
|
using NativeType = VkPhysicalDeviceTextureCompressionASTCHDRFeatures;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTextureCompressionAstcHdrFeatures;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceTextureCompressionASTCHDRFeatures( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR_ = {},
|
|
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, textureCompressionASTC_HDR( textureCompressionASTC_HDR_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR
|
|
PhysicalDeviceTextureCompressionASTCHDRFeatures( PhysicalDeviceTextureCompressionASTCHDRFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceTextureCompressionASTCHDRFeatures( VkPhysicalDeviceTextureCompressionASTCHDRFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceTextureCompressionASTCHDRFeatures( *reinterpret_cast<PhysicalDeviceTextureCompressionASTCHDRFeatures const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
PhysicalDeviceTextureCompressionASTCHDRFeatures & operator=( PhysicalDeviceTextureCompressionASTCHDRFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceTextureCompressionASTCHDRFeatures & operator=( VkPhysicalDeviceTextureCompressionASTCHDRFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTextureCompressionASTCHDRFeatures const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTextureCompressionASTCHDRFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTextureCompressionASTCHDRFeatures &
|
|
setTextureCompressionASTC_HDR( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
textureCompressionASTC_HDR = textureCompressionASTC_HDR_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkPhysicalDeviceTextureCompressionASTCHDRFeatures const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceTextureCompressionASTCHDRFeatures *>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceTextureCompressionASTCHDRFeatures &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceTextureCompressionASTCHDRFeatures *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, textureCompressionASTC_HDR );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( PhysicalDeviceTextureCompressionASTCHDRFeatures const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceTextureCompressionASTCHDRFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( textureCompressionASTC_HDR == rhs.textureCompressionASTC_HDR );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceTextureCompressionASTCHDRFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTextureCompressionAstcHdrFeatures;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceTextureCompressionAstcHdrFeatures>
|
|
{
|
|
using Type = PhysicalDeviceTextureCompressionASTCHDRFeatures;
|
|
};
|
|
|
|
using PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT = PhysicalDeviceTextureCompressionASTCHDRFeatures;
|
|
|
|
  struct PhysicalDeviceTimelineSemaphoreFeatures
  {
    using NativeType = VkPhysicalDeviceTimelineSemaphoreFeatures;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceTimelineSemaphoreFeatures;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceTimelineSemaphoreFeatures( VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore_ = {},
                                                                  void *                       pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , timelineSemaphore( timelineSemaphore_ )
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceTimelineSemaphoreFeatures( PhysicalDeviceTimelineSemaphoreFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceTimelineSemaphoreFeatures( VkPhysicalDeviceTimelineSemaphoreFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceTimelineSemaphoreFeatures( *reinterpret_cast<PhysicalDeviceTimelineSemaphoreFeatures const *>( &rhs ) )
    {
    }

    PhysicalDeviceTimelineSemaphoreFeatures & operator=( PhysicalDeviceTimelineSemaphoreFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceTimelineSemaphoreFeatures & operator=( VkPhysicalDeviceTimelineSemaphoreFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreFeatures const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTimelineSemaphoreFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTimelineSemaphoreFeatures &
      setTimelineSemaphore( VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore_ ) VULKAN_HPP_NOEXCEPT
    {
      timelineSemaphore = timelineSemaphore_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkPhysicalDeviceTimelineSemaphoreFeatures const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceTimelineSemaphoreFeatures *>( this );
    }

    operator VkPhysicalDeviceTimelineSemaphoreFeatures &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceTimelineSemaphoreFeatures *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#  if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#  else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#  endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, timelineSemaphore );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceTimelineSemaphoreFeatures const & ) const = default;
#else
    bool operator==( PhysicalDeviceTimelineSemaphoreFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( timelineSemaphore == rhs.timelineSemaphore );
#  endif
    }

    bool operator!=( PhysicalDeviceTimelineSemaphoreFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType             = StructureType::ePhysicalDeviceTimelineSemaphoreFeatures;
    void *                              pNext             = {};
    VULKAN_HPP_NAMESPACE::Bool32        timelineSemaphore = {};
  };

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceTimelineSemaphoreFeatures>
  {
    using Type = PhysicalDeviceTimelineSemaphoreFeatures;
  };

  using PhysicalDeviceTimelineSemaphoreFeaturesKHR = PhysicalDeviceTimelineSemaphoreFeatures;

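  // Illustrative usage sketch: a feature struct such as PhysicalDeviceTimelineSemaphoreFeatures is
  // typically chained into vk::DeviceCreateInfo::pNext at device-creation time. The physicalDevice
  // and queueCreateInfo objects are assumed to exist elsewhere.
  //
  //   vk::PhysicalDeviceTimelineSemaphoreFeatures timelineSemaphoreFeatures;
  //   timelineSemaphoreFeatures.setTimelineSemaphore( VK_TRUE );
  //
  //   vk::DeviceCreateInfo deviceCreateInfo;
  //   deviceCreateInfo.setQueueCreateInfos( queueCreateInfo ).setPNext( &timelineSemaphoreFeatures );
  //
  //   vk::Device device = physicalDevice.createDevice( deviceCreateInfo );
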
  struct PhysicalDeviceTimelineSemaphoreProperties
  {
    using NativeType = VkPhysicalDeviceTimelineSemaphoreProperties;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceTimelineSemaphoreProperties;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceTimelineSemaphoreProperties( uint64_t maxTimelineSemaphoreValueDifference_ = {},
                                                                    void *   pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , maxTimelineSemaphoreValueDifference( maxTimelineSemaphoreValueDifference_ )
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceTimelineSemaphoreProperties( PhysicalDeviceTimelineSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceTimelineSemaphoreProperties( VkPhysicalDeviceTimelineSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceTimelineSemaphoreProperties( *reinterpret_cast<PhysicalDeviceTimelineSemaphoreProperties const *>( &rhs ) )
    {
    }

    PhysicalDeviceTimelineSemaphoreProperties & operator=( PhysicalDeviceTimelineSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceTimelineSemaphoreProperties & operator=( VkPhysicalDeviceTimelineSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreProperties const *>( &rhs );
      return *this;
    }

    operator VkPhysicalDeviceTimelineSemaphoreProperties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceTimelineSemaphoreProperties *>( this );
    }

    operator VkPhysicalDeviceTimelineSemaphoreProperties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceTimelineSemaphoreProperties *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#  if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#  else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint64_t const &>
#  endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, maxTimelineSemaphoreValueDifference );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceTimelineSemaphoreProperties const & ) const = default;
#else
    bool operator==( PhysicalDeviceTimelineSemaphoreProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxTimelineSemaphoreValueDifference == rhs.maxTimelineSemaphoreValueDifference );
#  endif
    }

    bool operator!=( PhysicalDeviceTimelineSemaphoreProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType                               = StructureType::ePhysicalDeviceTimelineSemaphoreProperties;
    void *                              pNext                               = {};
    uint64_t                            maxTimelineSemaphoreValueDifference = {};
  };

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceTimelineSemaphoreProperties>
  {
    using Type = PhysicalDeviceTimelineSemaphoreProperties;
  };

  using PhysicalDeviceTimelineSemaphorePropertiesKHR = PhysicalDeviceTimelineSemaphoreProperties;

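  // Illustrative usage sketch: properties structs like PhysicalDeviceTimelineSemaphoreProperties are
  // usually read back through a vk::StructureChain via vk::PhysicalDevice::getProperties2;
  // physicalDevice is assumed to exist elsewhere.
  //
  //   auto chain = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2,
  //                                              vk::PhysicalDeviceTimelineSemaphoreProperties>();
  //   uint64_t maxDifference =
  //     chain.get<vk::PhysicalDeviceTimelineSemaphoreProperties>().maxTimelineSemaphoreValueDifference;
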
struct PhysicalDeviceToolProperties
|
|
{
|
|
using NativeType = VkPhysicalDeviceToolProperties;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceToolProperties;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceToolProperties( std::array<char, VK_MAX_EXTENSION_NAME_SIZE> const & name_ = {},
|
|
std::array<char, VK_MAX_EXTENSION_NAME_SIZE> const & version_ = {},
|
|
VULKAN_HPP_NAMESPACE::ToolPurposeFlags purposes_ = {},
|
|
std::array<char, VK_MAX_DESCRIPTION_SIZE> const & description_ = {},
|
|
std::array<char, VK_MAX_EXTENSION_NAME_SIZE> const & layer_ = {},
|
|
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, name( name_ )
|
|
, version( version_ )
|
|
, purposes( purposes_ )
|
|
, description( description_ )
|
|
, layer( layer_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceToolProperties( PhysicalDeviceToolProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceToolProperties( VkPhysicalDeviceToolProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceToolProperties( *reinterpret_cast<PhysicalDeviceToolProperties const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
PhysicalDeviceToolProperties( std::string const & name_,
|
|
std::string const & version_ = {},
|
|
VULKAN_HPP_NAMESPACE::ToolPurposeFlags purposes_ = {},
|
|
std::string const & description_ = {},
|
|
std::string const & layer_ = {},
|
|
void * pNext_ = nullptr )
|
|
: pNext( pNext_ ), purposes( purposes_ )
|
|
{
|
|
VULKAN_HPP_ASSERT( name_.size() < VK_MAX_EXTENSION_NAME_SIZE );
|
|
# if defined( WIN32 )
|
|
strncpy_s( name, VK_MAX_EXTENSION_NAME_SIZE, name_.data(), name_.size() );
|
|
# else
|
|
strncpy( name, name_.data(), std::min<size_t>( VK_MAX_EXTENSION_NAME_SIZE, name_.size() ) );
|
|
# endif
|
|
|
|
VULKAN_HPP_ASSERT( version_.size() < VK_MAX_EXTENSION_NAME_SIZE );
|
|
# if defined( WIN32 )
|
|
strncpy_s( version, VK_MAX_EXTENSION_NAME_SIZE, version_.data(), version_.size() );
|
|
# else
|
|
strncpy( version, version_.data(), std::min<size_t>( VK_MAX_EXTENSION_NAME_SIZE, version_.size() ) );
|
|
# endif
|
|
|
|
VULKAN_HPP_ASSERT( description_.size() < VK_MAX_DESCRIPTION_SIZE );
|
|
# if defined( WIN32 )
|
|
strncpy_s( description, VK_MAX_DESCRIPTION_SIZE, description_.data(), description_.size() );
|
|
# else
|
|
strncpy( description, description_.data(), std::min<size_t>( VK_MAX_DESCRIPTION_SIZE, description_.size() ) );
|
|
# endif
|
|
|
|
VULKAN_HPP_ASSERT( layer_.size() < VK_MAX_EXTENSION_NAME_SIZE );
|
|
# if defined( WIN32 )
|
|
strncpy_s( layer, VK_MAX_EXTENSION_NAME_SIZE, layer_.data(), layer_.size() );
|
|
# else
|
|
strncpy( layer, layer_.data(), std::min<size_t>( VK_MAX_EXTENSION_NAME_SIZE, layer_.size() ) );
|
|
# endif
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
PhysicalDeviceToolProperties & operator=( PhysicalDeviceToolProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceToolProperties & operator=( VkPhysicalDeviceToolProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
operator VkPhysicalDeviceToolProperties const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceToolProperties *>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceToolProperties &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceToolProperties *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
void * const &,
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> const &,
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> const &,
|
|
VULKAN_HPP_NAMESPACE::ToolPurposeFlags const &,
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> const &,
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, name, version, purposes, description, layer );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
std::strong_ordering operator<=>( PhysicalDeviceToolProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
|
|
return cmp;
|
|
if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
|
|
return cmp;
|
|
if ( auto cmp = strcmp( name, rhs.name ); cmp != 0 )
|
|
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
|
|
if ( auto cmp = strcmp( version, rhs.version ); cmp != 0 )
|
|
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
|
|
if ( auto cmp = purposes <=> rhs.purposes; cmp != 0 )
|
|
return cmp;
|
|
if ( auto cmp = strcmp( description, rhs.description ); cmp != 0 )
|
|
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
|
|
if ( auto cmp = strcmp( layer, rhs.layer ); cmp != 0 )
|
|
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
|
|
|
|
return std::strong_ordering::equivalent;
|
|
}
|
|
#endif
|
|
|
|
bool operator==( PhysicalDeviceToolProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( strcmp( name, rhs.name ) == 0 ) && ( strcmp( version, rhs.version ) == 0 ) &&
|
|
( purposes == rhs.purposes ) && ( strcmp( description, rhs.description ) == 0 ) && ( strcmp( layer, rhs.layer ) == 0 );
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceToolProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceToolProperties;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> name = {};
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> version = {};
|
|
VULKAN_HPP_NAMESPACE::ToolPurposeFlags purposes = {};
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> description = {};
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> layer = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceToolProperties>
|
|
{
|
|
using Type = PhysicalDeviceToolProperties;
|
|
};
|
|
|
|
using PhysicalDeviceToolPropertiesEXT = PhysicalDeviceToolProperties;
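  // Illustrative usage sketch: active tooling (validation layers, profilers, etc.) can be enumerated
  // into PhysicalDeviceToolProperties, assuming a Vulkan 1.3 capable physicalDevice exists elsewhere;
  // the EXT-suffixed alias above is served by getToolPropertiesEXT instead.
  //
  //   auto tools = physicalDevice.getToolProperties();
  //   for ( auto const & tool : tools )
  //   {
  //     std::cout << tool.name.data() << " " << tool.version.data() << "\n";
  //   }
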
  struct PhysicalDeviceUniformBufferStandardLayoutFeatures
  {
    using NativeType = VkPhysicalDeviceUniformBufferStandardLayoutFeatures;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceUniformBufferStandardLayoutFeatures;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceUniformBufferStandardLayoutFeatures( VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_ = {},
                                                                            void *                       pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , uniformBufferStandardLayout( uniformBufferStandardLayout_ )
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceUniformBufferStandardLayoutFeatures( PhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceUniformBufferStandardLayoutFeatures( VkPhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceUniformBufferStandardLayoutFeatures( *reinterpret_cast<PhysicalDeviceUniformBufferStandardLayoutFeatures const *>( &rhs ) )
    {
    }

    PhysicalDeviceUniformBufferStandardLayoutFeatures &
      operator=( PhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceUniformBufferStandardLayoutFeatures & operator=( VkPhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceUniformBufferStandardLayoutFeatures const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceUniformBufferStandardLayoutFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceUniformBufferStandardLayoutFeatures &
      setUniformBufferStandardLayout( VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_ ) VULKAN_HPP_NOEXCEPT
    {
      uniformBufferStandardLayout = uniformBufferStandardLayout_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkPhysicalDeviceUniformBufferStandardLayoutFeatures const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceUniformBufferStandardLayoutFeatures *>( this );
    }

    operator VkPhysicalDeviceUniformBufferStandardLayoutFeatures &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceUniformBufferStandardLayoutFeatures *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#  if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#  else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#  endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, uniformBufferStandardLayout );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceUniformBufferStandardLayoutFeatures const & ) const = default;
#else
    bool operator==( PhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( uniformBufferStandardLayout == rhs.uniformBufferStandardLayout );
#  endif
    }

    bool operator!=( PhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType                       = StructureType::ePhysicalDeviceUniformBufferStandardLayoutFeatures;
    void *                              pNext                       = {};
    VULKAN_HPP_NAMESPACE::Bool32        uniformBufferStandardLayout = {};
  };

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceUniformBufferStandardLayoutFeatures>
  {
    using Type = PhysicalDeviceUniformBufferStandardLayoutFeatures;
  };

  using PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR = PhysicalDeviceUniformBufferStandardLayoutFeatures;

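  // Illustrative usage sketch: feature availability is commonly queried through a vk::StructureChain
  // before the feature struct is chained into device creation; physicalDevice is assumed to exist
  // elsewhere.
  //
  //   auto features = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
  //                                               vk::PhysicalDeviceUniformBufferStandardLayoutFeatures>();
  //   bool supported =
  //     features.get<vk::PhysicalDeviceUniformBufferStandardLayoutFeatures>().uniformBufferStandardLayout == VK_TRUE;
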
struct PhysicalDeviceVariablePointersFeatures
|
|
{
|
|
using NativeType = VkPhysicalDeviceVariablePointersFeatures;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVariablePointersFeatures;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceVariablePointersFeatures( VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 variablePointers_ = {},
|
|
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, variablePointersStorageBuffer( variablePointersStorageBuffer_ )
|
|
, variablePointers( variablePointers_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceVariablePointersFeatures( PhysicalDeviceVariablePointersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceVariablePointersFeatures( VkPhysicalDeviceVariablePointersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceVariablePointersFeatures( *reinterpret_cast<PhysicalDeviceVariablePointersFeatures const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
PhysicalDeviceVariablePointersFeatures & operator=( PhysicalDeviceVariablePointersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceVariablePointersFeatures & operator=( VkPhysicalDeviceVariablePointersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVariablePointersFeatures const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVariablePointersFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVariablePointersFeatures &
|
|
setVariablePointersStorageBuffer( VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
variablePointersStorageBuffer = variablePointersStorageBuffer_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVariablePointersFeatures & setVariablePointers( VULKAN_HPP_NAMESPACE::Bool32 variablePointers_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
variablePointers = variablePointers_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkPhysicalDeviceVariablePointersFeatures const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceVariablePointersFeatures *>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceVariablePointersFeatures &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceVariablePointersFeatures *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, variablePointersStorageBuffer, variablePointers );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( PhysicalDeviceVariablePointersFeatures const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceVariablePointersFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( variablePointersStorageBuffer == rhs.variablePointersStorageBuffer ) &&
|
|
( variablePointers == rhs.variablePointers );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceVariablePointersFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVariablePointersFeatures;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 variablePointers = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceVariablePointersFeatures>
|
|
{
|
|
using Type = PhysicalDeviceVariablePointersFeatures;
|
|
};
|
|
|
|
using PhysicalDeviceVariablePointerFeatures = PhysicalDeviceVariablePointersFeatures;
|
|
using PhysicalDeviceVariablePointerFeaturesKHR = PhysicalDeviceVariablePointersFeatures;
|
|
using PhysicalDeviceVariablePointersFeaturesKHR = PhysicalDeviceVariablePointersFeatures;
|
|
|
|
  struct PhysicalDeviceVertexAttributeDivisorPropertiesEXT
  {
    using NativeType = VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeDivisorPropertiesEXT( uint32_t maxVertexAttribDivisor_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , maxVertexAttribDivisor( maxVertexAttribDivisor_ )
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceVertexAttributeDivisorPropertiesEXT( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceVertexAttributeDivisorPropertiesEXT( VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceVertexAttributeDivisorPropertiesEXT( *reinterpret_cast<PhysicalDeviceVertexAttributeDivisorPropertiesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceVertexAttributeDivisorPropertiesEXT &
      operator=( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceVertexAttributeDivisorPropertiesEXT & operator=( VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorPropertiesEXT const *>( &rhs );
      return *this;
    }

    operator VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#  if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#  else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &>
#  endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, maxVertexAttribDivisor );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxVertexAttribDivisor == rhs.maxVertexAttribDivisor );
#  endif
    }

    bool operator!=( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType                  = StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesEXT;
    void *                              pNext                  = {};
    uint32_t                            maxVertexAttribDivisor = {};
  };

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesEXT>
  {
    using Type = PhysicalDeviceVertexAttributeDivisorPropertiesEXT;
  };

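  // Illustrative usage sketch: with VK_EXT_vertex_attribute_divisor enabled, the device limit can be
  // read back alongside the core properties; physicalDevice is assumed to exist elsewhere.
  //
  //   auto props = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2,
  //                                              vk::PhysicalDeviceVertexAttributeDivisorPropertiesEXT>();
  //   uint32_t maxDivisor =
  //     props.get<vk::PhysicalDeviceVertexAttributeDivisorPropertiesEXT>().maxVertexAttribDivisor;
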
  struct PhysicalDeviceVertexInputDynamicStateFeaturesEXT
  {
    using NativeType = VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceVertexInputDynamicStateFeaturesEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexInputDynamicStateFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 vertexInputDynamicState_ = {},
                                                                           void *                       pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , vertexInputDynamicState( vertexInputDynamicState_ )
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceVertexInputDynamicStateFeaturesEXT( PhysicalDeviceVertexInputDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceVertexInputDynamicStateFeaturesEXT( VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceVertexInputDynamicStateFeaturesEXT( *reinterpret_cast<PhysicalDeviceVertexInputDynamicStateFeaturesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceVertexInputDynamicStateFeaturesEXT & operator=( PhysicalDeviceVertexInputDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceVertexInputDynamicStateFeaturesEXT & operator=( VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexInputDynamicStateFeaturesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVertexInputDynamicStateFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVertexInputDynamicStateFeaturesEXT &
      setVertexInputDynamicState( VULKAN_HPP_NAMESPACE::Bool32 vertexInputDynamicState_ ) VULKAN_HPP_NOEXCEPT
    {
      vertexInputDynamicState = vertexInputDynamicState_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#  if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#  else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#  endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, vertexInputDynamicState );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceVertexInputDynamicStateFeaturesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceVertexInputDynamicStateFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( vertexInputDynamicState == rhs.vertexInputDynamicState );
#  endif
    }

    bool operator!=( PhysicalDeviceVertexInputDynamicStateFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType                   = StructureType::ePhysicalDeviceVertexInputDynamicStateFeaturesEXT;
    void *                              pNext                   = {};
    VULKAN_HPP_NAMESPACE::Bool32        vertexInputDynamicState = {};
  };

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceVertexInputDynamicStateFeaturesEXT>
  {
    using Type = PhysicalDeviceVertexInputDynamicStateFeaturesEXT;
  };

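  // Illustrative usage sketch: vertexInputDynamicState also requires the
  // VK_EXT_vertex_input_dynamic_state device extension; the queueCreateInfo and enabledExtensions
  // variables are assumed to be set up elsewhere.
  //
  //   vk::PhysicalDeviceVertexInputDynamicStateFeaturesEXT vertexInputDynamicStateFeatures( VK_TRUE );
  //
  //   vk::DeviceCreateInfo deviceCreateInfo;
  //   deviceCreateInfo.setQueueCreateInfos( queueCreateInfo )
  //     .setPEnabledExtensionNames( enabledExtensions )  // includes VK_EXT_VERTEX_INPUT_DYNAMIC_STATE_EXTENSION_NAME
  //     .setPNext( &vertexInputDynamicStateFeatures );
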
struct PhysicalDeviceVulkan11Features
|
|
{
|
|
using NativeType = VkPhysicalDeviceVulkan11Features;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan11Features;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan11Features( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 multiview_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 variablePointers_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ = {},
|
|
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, storageBuffer16BitAccess( storageBuffer16BitAccess_ )
|
|
, uniformAndStorageBuffer16BitAccess( uniformAndStorageBuffer16BitAccess_ )
|
|
, storagePushConstant16( storagePushConstant16_ )
|
|
, storageInputOutput16( storageInputOutput16_ )
|
|
, multiview( multiview_ )
|
|
, multiviewGeometryShader( multiviewGeometryShader_ )
|
|
, multiviewTessellationShader( multiviewTessellationShader_ )
|
|
, variablePointersStorageBuffer( variablePointersStorageBuffer_ )
|
|
, variablePointers( variablePointers_ )
|
|
, protectedMemory( protectedMemory_ )
|
|
, samplerYcbcrConversion( samplerYcbcrConversion_ )
|
|
, shaderDrawParameters( shaderDrawParameters_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan11Features( PhysicalDeviceVulkan11Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceVulkan11Features( VkPhysicalDeviceVulkan11Features const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceVulkan11Features( *reinterpret_cast<PhysicalDeviceVulkan11Features const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
PhysicalDeviceVulkan11Features & operator=( PhysicalDeviceVulkan11Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceVulkan11Features & operator=( VkPhysicalDeviceVulkan11Features const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Features const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features &
|
|
setStorageBuffer16BitAccess( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
storageBuffer16BitAccess = storageBuffer16BitAccess_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features &
|
|
setUniformAndStorageBuffer16BitAccess( VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
uniformAndStorageBuffer16BitAccess = uniformAndStorageBuffer16BitAccess_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setStoragePushConstant16( VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
storagePushConstant16 = storagePushConstant16_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setStorageInputOutput16( VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
storageInputOutput16 = storageInputOutput16_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setMultiview( VULKAN_HPP_NAMESPACE::Bool32 multiview_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
multiview = multiview_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features &
|
|
setMultiviewGeometryShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
multiviewGeometryShader = multiviewGeometryShader_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features &
|
|
setMultiviewTessellationShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
multiviewTessellationShader = multiviewTessellationShader_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features &
|
|
setVariablePointersStorageBuffer( VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
variablePointersStorageBuffer = variablePointersStorageBuffer_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setVariablePointers( VULKAN_HPP_NAMESPACE::Bool32 variablePointers_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
variablePointers = variablePointers_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setProtectedMemory( VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
protectedMemory = protectedMemory_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features &
|
|
setSamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
samplerYcbcrConversion = samplerYcbcrConversion_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setShaderDrawParameters( VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shaderDrawParameters = shaderDrawParameters_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkPhysicalDeviceVulkan11Features const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceVulkan11Features *>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceVulkan11Features &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceVulkan11Features *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
void * const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType,
|
|
pNext,
|
|
storageBuffer16BitAccess,
|
|
uniformAndStorageBuffer16BitAccess,
|
|
storagePushConstant16,
|
|
storageInputOutput16,
|
|
multiview,
|
|
multiviewGeometryShader,
|
|
multiviewTessellationShader,
|
|
variablePointersStorageBuffer,
|
|
variablePointers,
|
|
protectedMemory,
|
|
samplerYcbcrConversion,
|
|
shaderDrawParameters );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( PhysicalDeviceVulkan11Features const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceVulkan11Features const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( storageBuffer16BitAccess == rhs.storageBuffer16BitAccess ) &&
|
|
( uniformAndStorageBuffer16BitAccess == rhs.uniformAndStorageBuffer16BitAccess ) && ( storagePushConstant16 == rhs.storagePushConstant16 ) &&
|
|
( storageInputOutput16 == rhs.storageInputOutput16 ) && ( multiview == rhs.multiview ) &&
|
|
( multiviewGeometryShader == rhs.multiviewGeometryShader ) && ( multiviewTessellationShader == rhs.multiviewTessellationShader ) &&
|
|
( variablePointersStorageBuffer == rhs.variablePointersStorageBuffer ) && ( variablePointers == rhs.variablePointers ) &&
|
|
( protectedMemory == rhs.protectedMemory ) && ( samplerYcbcrConversion == rhs.samplerYcbcrConversion ) &&
|
|
( shaderDrawParameters == rhs.shaderDrawParameters );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceVulkan11Features const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan11Features;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16 = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16 = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 multiview = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 variablePointers = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 protectedMemory = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceVulkan11Features>
|
|
{
|
|
using Type = PhysicalDeviceVulkan11Features;
|
|
};
|
|
|
|
struct PhysicalDeviceVulkan11Properties
|
|
{
|
|
using NativeType = VkPhysicalDeviceVulkan11Properties;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan11Properties;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Properties(
|
|
std::array<uint8_t, VK_UUID_SIZE> const & deviceUUID_ = {},
|
|
std::array<uint8_t, VK_UUID_SIZE> const & driverUUID_ = {},
|
|
std::array<uint8_t, VK_LUID_SIZE> const & deviceLUID_ = {},
|
|
uint32_t deviceNodeMask_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 deviceLUIDValid_ = {},
|
|
uint32_t subgroupSize_ = {},
|
|
VULKAN_HPP_NAMESPACE::ShaderStageFlags subgroupSupportedStages_ = {},
|
|
VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags subgroupSupportedOperations_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 subgroupQuadOperationsInAllStages_ = {},
|
|
VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior_ = VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes,
|
|
uint32_t maxMultiviewViewCount_ = {},
|
|
uint32_t maxMultiviewInstanceIndex_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault_ = {},
|
|
uint32_t maxPerSetDescriptors_ = {},
|
|
VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize_ = {},
|
|
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, deviceUUID( deviceUUID_ )
|
|
, driverUUID( driverUUID_ )
|
|
, deviceLUID( deviceLUID_ )
|
|
, deviceNodeMask( deviceNodeMask_ )
|
|
, deviceLUIDValid( deviceLUIDValid_ )
|
|
, subgroupSize( subgroupSize_ )
|
|
, subgroupSupportedStages( subgroupSupportedStages_ )
|
|
, subgroupSupportedOperations( subgroupSupportedOperations_ )
|
|
, subgroupQuadOperationsInAllStages( subgroupQuadOperationsInAllStages_ )
|
|
, pointClippingBehavior( pointClippingBehavior_ )
|
|
, maxMultiviewViewCount( maxMultiviewViewCount_ )
|
|
, maxMultiviewInstanceIndex( maxMultiviewInstanceIndex_ )
|
|
, protectedNoFault( protectedNoFault_ )
|
|
, maxPerSetDescriptors( maxPerSetDescriptors_ )
|
|
, maxMemoryAllocationSize( maxMemoryAllocationSize_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Properties( PhysicalDeviceVulkan11Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceVulkan11Properties( VkPhysicalDeviceVulkan11Properties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceVulkan11Properties( *reinterpret_cast<PhysicalDeviceVulkan11Properties const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
PhysicalDeviceVulkan11Properties & operator=( PhysicalDeviceVulkan11Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceVulkan11Properties & operator=( VkPhysicalDeviceVulkan11Properties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Properties const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
operator VkPhysicalDeviceVulkan11Properties const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceVulkan11Properties *>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceVulkan11Properties &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceVulkan11Properties *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
void * const &,
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> const &,
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> const &,
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_LUID_SIZE> const &,
|
|
uint32_t const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
uint32_t const &,
|
|
VULKAN_HPP_NAMESPACE::ShaderStageFlags const &,
|
|
VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::PointClippingBehavior const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
uint32_t const &,
|
|
VULKAN_HPP_NAMESPACE::DeviceSize const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType,
|
|
pNext,
|
|
deviceUUID,
|
|
driverUUID,
|
|
deviceLUID,
|
|
deviceNodeMask,
|
|
deviceLUIDValid,
|
|
subgroupSize,
|
|
subgroupSupportedStages,
|
|
subgroupSupportedOperations,
|
|
subgroupQuadOperationsInAllStages,
|
|
pointClippingBehavior,
|
|
maxMultiviewViewCount,
|
|
maxMultiviewInstanceIndex,
|
|
protectedNoFault,
|
|
maxPerSetDescriptors,
|
|
maxMemoryAllocationSize );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( PhysicalDeviceVulkan11Properties const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceVulkan11Properties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceUUID == rhs.deviceUUID ) && ( driverUUID == rhs.driverUUID ) &&
|
|
( deviceLUID == rhs.deviceLUID ) && ( deviceNodeMask == rhs.deviceNodeMask ) && ( deviceLUIDValid == rhs.deviceLUIDValid ) &&
|
|
( subgroupSize == rhs.subgroupSize ) && ( subgroupSupportedStages == rhs.subgroupSupportedStages ) &&
|
|
( subgroupSupportedOperations == rhs.subgroupSupportedOperations ) &&
|
|
( subgroupQuadOperationsInAllStages == rhs.subgroupQuadOperationsInAllStages ) && ( pointClippingBehavior == rhs.pointClippingBehavior ) &&
|
|
( maxMultiviewViewCount == rhs.maxMultiviewViewCount ) && ( maxMultiviewInstanceIndex == rhs.maxMultiviewInstanceIndex ) &&
|
|
( protectedNoFault == rhs.protectedNoFault ) && ( maxPerSetDescriptors == rhs.maxPerSetDescriptors ) &&
|
|
( maxMemoryAllocationSize == rhs.maxMemoryAllocationSize );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceVulkan11Properties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan11Properties;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> deviceUUID = {};
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> driverUUID = {};
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_LUID_SIZE> deviceLUID = {};
|
|
uint32_t deviceNodeMask = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 deviceLUIDValid = {};
|
|
uint32_t subgroupSize = {};
|
|
VULKAN_HPP_NAMESPACE::ShaderStageFlags subgroupSupportedStages = {};
|
|
VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags subgroupSupportedOperations = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 subgroupQuadOperationsInAllStages = {};
|
|
VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior = VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes;
|
|
uint32_t maxMultiviewViewCount = {};
|
|
uint32_t maxMultiviewInstanceIndex = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault = {};
|
|
uint32_t maxPerSetDescriptors = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceVulkan11Properties>
|
|
{
|
|
using Type = PhysicalDeviceVulkan11Properties;
|
|
};
|
|
|
|
struct PhysicalDeviceVulkan12Features
|
|
{
|
|
using NativeType = VkPhysicalDeviceVulkan12Features;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan12Features;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan12Features( VULKAN_HPP_NAMESPACE::Bool32 samplerMirrorClampToEdge_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 drawIndirectCount_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderInt8_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 descriptorIndexing_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 samplerFilterMinmax_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderOutputViewportIndex_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderOutputLayer_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 subgroupBroadcastDynamicId_ = {},
|
|
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, samplerMirrorClampToEdge( samplerMirrorClampToEdge_ )
|
|
, drawIndirectCount( drawIndirectCount_ )
|
|
, storageBuffer8BitAccess( storageBuffer8BitAccess_ )
|
|
, uniformAndStorageBuffer8BitAccess( uniformAndStorageBuffer8BitAccess_ )
|
|
, storagePushConstant8( storagePushConstant8_ )
|
|
, shaderBufferInt64Atomics( shaderBufferInt64Atomics_ )
|
|
, shaderSharedInt64Atomics( shaderSharedInt64Atomics_ )
|
|
, shaderFloat16( shaderFloat16_ )
|
|
, shaderInt8( shaderInt8_ )
|
|
, descriptorIndexing( descriptorIndexing_ )
|
|
, shaderInputAttachmentArrayDynamicIndexing( shaderInputAttachmentArrayDynamicIndexing_ )
|
|
, shaderUniformTexelBufferArrayDynamicIndexing( shaderUniformTexelBufferArrayDynamicIndexing_ )
|
|
, shaderStorageTexelBufferArrayDynamicIndexing( shaderStorageTexelBufferArrayDynamicIndexing_ )
|
|
, shaderUniformBufferArrayNonUniformIndexing( shaderUniformBufferArrayNonUniformIndexing_ )
|
|
, shaderSampledImageArrayNonUniformIndexing( shaderSampledImageArrayNonUniformIndexing_ )
|
|
, shaderStorageBufferArrayNonUniformIndexing( shaderStorageBufferArrayNonUniformIndexing_ )
|
|
, shaderStorageImageArrayNonUniformIndexing( shaderStorageImageArrayNonUniformIndexing_ )
|
|
, shaderInputAttachmentArrayNonUniformIndexing( shaderInputAttachmentArrayNonUniformIndexing_ )
|
|
, shaderUniformTexelBufferArrayNonUniformIndexing( shaderUniformTexelBufferArrayNonUniformIndexing_ )
|
|
, shaderStorageTexelBufferArrayNonUniformIndexing( shaderStorageTexelBufferArrayNonUniformIndexing_ )
|
|
, descriptorBindingUniformBufferUpdateAfterBind( descriptorBindingUniformBufferUpdateAfterBind_ )
|
|
, descriptorBindingSampledImageUpdateAfterBind( descriptorBindingSampledImageUpdateAfterBind_ )
|
|
, descriptorBindingStorageImageUpdateAfterBind( descriptorBindingStorageImageUpdateAfterBind_ )
|
|
, descriptorBindingStorageBufferUpdateAfterBind( descriptorBindingStorageBufferUpdateAfterBind_ )
|
|
, descriptorBindingUniformTexelBufferUpdateAfterBind( descriptorBindingUniformTexelBufferUpdateAfterBind_ )
|
|
, descriptorBindingStorageTexelBufferUpdateAfterBind( descriptorBindingStorageTexelBufferUpdateAfterBind_ )
|
|
, descriptorBindingUpdateUnusedWhilePending( descriptorBindingUpdateUnusedWhilePending_ )
|
|
, descriptorBindingPartiallyBound( descriptorBindingPartiallyBound_ )
|
|
, descriptorBindingVariableDescriptorCount( descriptorBindingVariableDescriptorCount_ )
|
|
, runtimeDescriptorArray( runtimeDescriptorArray_ )
|
|
, samplerFilterMinmax( samplerFilterMinmax_ )
|
|
, scalarBlockLayout( scalarBlockLayout_ )
|
|
, imagelessFramebuffer( imagelessFramebuffer_ )
|
|
, uniformBufferStandardLayout( uniformBufferStandardLayout_ )
|
|
, shaderSubgroupExtendedTypes( shaderSubgroupExtendedTypes_ )
|
|
, separateDepthStencilLayouts( separateDepthStencilLayouts_ )
|
|
, hostQueryReset( hostQueryReset_ )
|
|
, timelineSemaphore( timelineSemaphore_ )
|
|
, bufferDeviceAddress( bufferDeviceAddress_ )
|
|
, bufferDeviceAddressCaptureReplay( bufferDeviceAddressCaptureReplay_ )
|
|
, bufferDeviceAddressMultiDevice( bufferDeviceAddressMultiDevice_ )
|
|
, vulkanMemoryModel( vulkanMemoryModel_ )
|
|
, vulkanMemoryModelDeviceScope( vulkanMemoryModelDeviceScope_ )
|
|
, vulkanMemoryModelAvailabilityVisibilityChains( vulkanMemoryModelAvailabilityVisibilityChains_ )
|
|
, shaderOutputViewportIndex( shaderOutputViewportIndex_ )
|
|
, shaderOutputLayer( shaderOutputLayer_ )
|
|
, subgroupBroadcastDynamicId( subgroupBroadcastDynamicId_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan12Features( PhysicalDeviceVulkan12Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceVulkan12Features( VkPhysicalDeviceVulkan12Features const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceVulkan12Features( *reinterpret_cast<PhysicalDeviceVulkan12Features const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
PhysicalDeviceVulkan12Features & operator=( PhysicalDeviceVulkan12Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceVulkan12Features & operator=( VkPhysicalDeviceVulkan12Features const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Features const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
|
|
setSamplerMirrorClampToEdge( VULKAN_HPP_NAMESPACE::Bool32 samplerMirrorClampToEdge_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
samplerMirrorClampToEdge = samplerMirrorClampToEdge_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setDrawIndirectCount( VULKAN_HPP_NAMESPACE::Bool32 drawIndirectCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
drawIndirectCount = drawIndirectCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
|
|
setStorageBuffer8BitAccess( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
storageBuffer8BitAccess = storageBuffer8BitAccess_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
|
|
setUniformAndStorageBuffer8BitAccess( VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
uniformAndStorageBuffer8BitAccess = uniformAndStorageBuffer8BitAccess_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setStoragePushConstant8( VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
storagePushConstant8 = storagePushConstant8_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
|
|
setShaderBufferInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shaderBufferInt64Atomics = shaderBufferInt64Atomics_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
|
|
setShaderSharedInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shaderSharedInt64Atomics = shaderSharedInt64Atomics_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderFloat16( VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shaderFloat16 = shaderFloat16_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderInt8( VULKAN_HPP_NAMESPACE::Bool32 shaderInt8_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shaderInt8 = shaderInt8_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setDescriptorIndexing( VULKAN_HPP_NAMESPACE::Bool32 descriptorIndexing_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
descriptorIndexing = descriptorIndexing_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
|
|
setShaderInputAttachmentArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shaderInputAttachmentArrayDynamicIndexing = shaderInputAttachmentArrayDynamicIndexing_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
|
|
setShaderUniformTexelBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shaderUniformTexelBufferArrayDynamicIndexing = shaderUniformTexelBufferArrayDynamicIndexing_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
|
|
setShaderStorageTexelBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shaderStorageTexelBufferArrayDynamicIndexing = shaderStorageTexelBufferArrayDynamicIndexing_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
|
|
setShaderUniformBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shaderUniformBufferArrayNonUniformIndexing = shaderUniformBufferArrayNonUniformIndexing_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
|
|
setShaderSampledImageArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shaderSampledImageArrayNonUniformIndexing = shaderSampledImageArrayNonUniformIndexing_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
|
|
setShaderStorageBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shaderStorageBufferArrayNonUniformIndexing = shaderStorageBufferArrayNonUniformIndexing_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
|
|
setShaderStorageImageArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shaderStorageImageArrayNonUniformIndexing = shaderStorageImageArrayNonUniformIndexing_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
|
|
setShaderInputAttachmentArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shaderInputAttachmentArrayNonUniformIndexing = shaderInputAttachmentArrayNonUniformIndexing_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
|
|
setShaderUniformTexelBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shaderUniformTexelBufferArrayNonUniformIndexing = shaderUniformTexelBufferArrayNonUniformIndexing_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
|
|
setShaderStorageTexelBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shaderStorageTexelBufferArrayNonUniformIndexing = shaderStorageTexelBufferArrayNonUniformIndexing_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
|
|
setDescriptorBindingUniformBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
descriptorBindingUniformBufferUpdateAfterBind = descriptorBindingUniformBufferUpdateAfterBind_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
|
|
setDescriptorBindingSampledImageUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
descriptorBindingSampledImageUpdateAfterBind = descriptorBindingSampledImageUpdateAfterBind_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
|
|
setDescriptorBindingStorageImageUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
descriptorBindingStorageImageUpdateAfterBind = descriptorBindingStorageImageUpdateAfterBind_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
|
|
setDescriptorBindingStorageBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
descriptorBindingStorageBufferUpdateAfterBind = descriptorBindingStorageBufferUpdateAfterBind_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setDescriptorBindingUniformTexelBufferUpdateAfterBind(
|
|
VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
descriptorBindingUniformTexelBufferUpdateAfterBind = descriptorBindingUniformTexelBufferUpdateAfterBind_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setDescriptorBindingStorageTexelBufferUpdateAfterBind(
|
|
VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
descriptorBindingStorageTexelBufferUpdateAfterBind = descriptorBindingStorageTexelBufferUpdateAfterBind_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
|
|
setDescriptorBindingUpdateUnusedWhilePending( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
descriptorBindingUpdateUnusedWhilePending = descriptorBindingUpdateUnusedWhilePending_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
|
|
setDescriptorBindingPartiallyBound( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
descriptorBindingPartiallyBound = descriptorBindingPartiallyBound_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
|
|
setDescriptorBindingVariableDescriptorCount( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
descriptorBindingVariableDescriptorCount = descriptorBindingVariableDescriptorCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
|
|
setRuntimeDescriptorArray( VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
runtimeDescriptorArray = runtimeDescriptorArray_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setSamplerFilterMinmax( VULKAN_HPP_NAMESPACE::Bool32 samplerFilterMinmax_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
samplerFilterMinmax = samplerFilterMinmax_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setScalarBlockLayout( VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
scalarBlockLayout = scalarBlockLayout_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setImagelessFramebuffer( VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
imagelessFramebuffer = imagelessFramebuffer_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
|
|
setUniformBufferStandardLayout( VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
uniformBufferStandardLayout = uniformBufferStandardLayout_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
|
|
setShaderSubgroupExtendedTypes( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shaderSubgroupExtendedTypes = shaderSubgroupExtendedTypes_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
|
|
setSeparateDepthStencilLayouts( VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
separateDepthStencilLayouts = separateDepthStencilLayouts_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setHostQueryReset( VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
hostQueryReset = hostQueryReset_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setTimelineSemaphore( VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
timelineSemaphore = timelineSemaphore_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setBufferDeviceAddress( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
bufferDeviceAddress = bufferDeviceAddress_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
|
|
setBufferDeviceAddressCaptureReplay( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
bufferDeviceAddressCaptureReplay = bufferDeviceAddressCaptureReplay_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
|
|
setBufferDeviceAddressMultiDevice( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
bufferDeviceAddressMultiDevice = bufferDeviceAddressMultiDevice_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setVulkanMemoryModel( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
vulkanMemoryModel = vulkanMemoryModel_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
|
|
setVulkanMemoryModelDeviceScope( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
vulkanMemoryModelDeviceScope = vulkanMemoryModelDeviceScope_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
|
|
setVulkanMemoryModelAvailabilityVisibilityChains( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
vulkanMemoryModelAvailabilityVisibilityChains = vulkanMemoryModelAvailabilityVisibilityChains_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
|
|
setShaderOutputViewportIndex( VULKAN_HPP_NAMESPACE::Bool32 shaderOutputViewportIndex_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shaderOutputViewportIndex = shaderOutputViewportIndex_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderOutputLayer( VULKAN_HPP_NAMESPACE::Bool32 shaderOutputLayer_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shaderOutputLayer = shaderOutputLayer_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
|
|
setSubgroupBroadcastDynamicId( VULKAN_HPP_NAMESPACE::Bool32 subgroupBroadcastDynamicId_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
subgroupBroadcastDynamicId = subgroupBroadcastDynamicId_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkPhysicalDeviceVulkan12Features const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceVulkan12Features *>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceVulkan12Features &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceVulkan12Features *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
void * const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType,
|
|
pNext,
|
|
samplerMirrorClampToEdge,
|
|
drawIndirectCount,
|
|
storageBuffer8BitAccess,
|
|
uniformAndStorageBuffer8BitAccess,
|
|
storagePushConstant8,
|
|
shaderBufferInt64Atomics,
|
|
shaderSharedInt64Atomics,
|
|
shaderFloat16,
|
|
shaderInt8,
|
|
descriptorIndexing,
|
|
shaderInputAttachmentArrayDynamicIndexing,
|
|
shaderUniformTexelBufferArrayDynamicIndexing,
|
|
shaderStorageTexelBufferArrayDynamicIndexing,
|
|
shaderUniformBufferArrayNonUniformIndexing,
|
|
shaderSampledImageArrayNonUniformIndexing,
|
|
shaderStorageBufferArrayNonUniformIndexing,
|
|
shaderStorageImageArrayNonUniformIndexing,
|
|
shaderInputAttachmentArrayNonUniformIndexing,
|
|
shaderUniformTexelBufferArrayNonUniformIndexing,
|
|
shaderStorageTexelBufferArrayNonUniformIndexing,
|
|
descriptorBindingUniformBufferUpdateAfterBind,
|
|
descriptorBindingSampledImageUpdateAfterBind,
|
|
descriptorBindingStorageImageUpdateAfterBind,
|
|
descriptorBindingStorageBufferUpdateAfterBind,
|
|
descriptorBindingUniformTexelBufferUpdateAfterBind,
|
|
descriptorBindingStorageTexelBufferUpdateAfterBind,
|
|
descriptorBindingUpdateUnusedWhilePending,
|
|
descriptorBindingPartiallyBound,
|
|
descriptorBindingVariableDescriptorCount,
|
|
runtimeDescriptorArray,
|
|
samplerFilterMinmax,
|
|
scalarBlockLayout,
|
|
imagelessFramebuffer,
|
|
uniformBufferStandardLayout,
|
|
shaderSubgroupExtendedTypes,
|
|
separateDepthStencilLayouts,
|
|
hostQueryReset,
|
|
timelineSemaphore,
|
|
bufferDeviceAddress,
|
|
bufferDeviceAddressCaptureReplay,
|
|
bufferDeviceAddressMultiDevice,
|
|
vulkanMemoryModel,
|
|
vulkanMemoryModelDeviceScope,
|
|
vulkanMemoryModelAvailabilityVisibilityChains,
|
|
shaderOutputViewportIndex,
|
|
shaderOutputLayer,
|
|
subgroupBroadcastDynamicId );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( PhysicalDeviceVulkan12Features const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceVulkan12Features const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( samplerMirrorClampToEdge == rhs.samplerMirrorClampToEdge ) &&
|
|
( drawIndirectCount == rhs.drawIndirectCount ) && ( storageBuffer8BitAccess == rhs.storageBuffer8BitAccess ) &&
|
|
( uniformAndStorageBuffer8BitAccess == rhs.uniformAndStorageBuffer8BitAccess ) && ( storagePushConstant8 == rhs.storagePushConstant8 ) &&
|
|
( shaderBufferInt64Atomics == rhs.shaderBufferInt64Atomics ) && ( shaderSharedInt64Atomics == rhs.shaderSharedInt64Atomics ) &&
|
|
( shaderFloat16 == rhs.shaderFloat16 ) && ( shaderInt8 == rhs.shaderInt8 ) && ( descriptorIndexing == rhs.descriptorIndexing ) &&
|
|
( shaderInputAttachmentArrayDynamicIndexing == rhs.shaderInputAttachmentArrayDynamicIndexing ) &&
|
|
( shaderUniformTexelBufferArrayDynamicIndexing == rhs.shaderUniformTexelBufferArrayDynamicIndexing ) &&
|
|
( shaderStorageTexelBufferArrayDynamicIndexing == rhs.shaderStorageTexelBufferArrayDynamicIndexing ) &&
|
|
( shaderUniformBufferArrayNonUniformIndexing == rhs.shaderUniformBufferArrayNonUniformIndexing ) &&
|
|
( shaderSampledImageArrayNonUniformIndexing == rhs.shaderSampledImageArrayNonUniformIndexing ) &&
|
|
( shaderStorageBufferArrayNonUniformIndexing == rhs.shaderStorageBufferArrayNonUniformIndexing ) &&
|
|
( shaderStorageImageArrayNonUniformIndexing == rhs.shaderStorageImageArrayNonUniformIndexing ) &&
|
|
( shaderInputAttachmentArrayNonUniformIndexing == rhs.shaderInputAttachmentArrayNonUniformIndexing ) &&
|
|
( shaderUniformTexelBufferArrayNonUniformIndexing == rhs.shaderUniformTexelBufferArrayNonUniformIndexing ) &&
|
|
( shaderStorageTexelBufferArrayNonUniformIndexing == rhs.shaderStorageTexelBufferArrayNonUniformIndexing ) &&
|
|
( descriptorBindingUniformBufferUpdateAfterBind == rhs.descriptorBindingUniformBufferUpdateAfterBind ) &&
|
|
( descriptorBindingSampledImageUpdateAfterBind == rhs.descriptorBindingSampledImageUpdateAfterBind ) &&
|
|
( descriptorBindingStorageImageUpdateAfterBind == rhs.descriptorBindingStorageImageUpdateAfterBind ) &&
|
|
( descriptorBindingStorageBufferUpdateAfterBind == rhs.descriptorBindingStorageBufferUpdateAfterBind ) &&
|
|
( descriptorBindingUniformTexelBufferUpdateAfterBind == rhs.descriptorBindingUniformTexelBufferUpdateAfterBind ) &&
|
|
( descriptorBindingStorageTexelBufferUpdateAfterBind == rhs.descriptorBindingStorageTexelBufferUpdateAfterBind ) &&
|
|
( descriptorBindingUpdateUnusedWhilePending == rhs.descriptorBindingUpdateUnusedWhilePending ) &&
|
|
( descriptorBindingPartiallyBound == rhs.descriptorBindingPartiallyBound ) &&
|
|
( descriptorBindingVariableDescriptorCount == rhs.descriptorBindingVariableDescriptorCount ) &&
|
|
( runtimeDescriptorArray == rhs.runtimeDescriptorArray ) && ( samplerFilterMinmax == rhs.samplerFilterMinmax ) &&
|
|
( scalarBlockLayout == rhs.scalarBlockLayout ) && ( imagelessFramebuffer == rhs.imagelessFramebuffer ) &&
|
|
( uniformBufferStandardLayout == rhs.uniformBufferStandardLayout ) && ( shaderSubgroupExtendedTypes == rhs.shaderSubgroupExtendedTypes ) &&
|
|
( separateDepthStencilLayouts == rhs.separateDepthStencilLayouts ) && ( hostQueryReset == rhs.hostQueryReset ) &&
|
|
( timelineSemaphore == rhs.timelineSemaphore ) && ( bufferDeviceAddress == rhs.bufferDeviceAddress ) &&
|
|
( bufferDeviceAddressCaptureReplay == rhs.bufferDeviceAddressCaptureReplay ) &&
|
|
( bufferDeviceAddressMultiDevice == rhs.bufferDeviceAddressMultiDevice ) && ( vulkanMemoryModel == rhs.vulkanMemoryModel ) &&
|
|
( vulkanMemoryModelDeviceScope == rhs.vulkanMemoryModelDeviceScope ) &&
|
|
( vulkanMemoryModelAvailabilityVisibilityChains == rhs.vulkanMemoryModelAvailabilityVisibilityChains ) &&
|
|
( shaderOutputViewportIndex == rhs.shaderOutputViewportIndex ) && ( shaderOutputLayer == rhs.shaderOutputLayer ) &&
|
|
( subgroupBroadcastDynamicId == rhs.subgroupBroadcastDynamicId );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceVulkan12Features const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan12Features;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 samplerMirrorClampToEdge = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 drawIndirectCount = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8 = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16 = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderInt8 = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 descriptorIndexing = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 samplerFilterMinmax = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderOutputViewportIndex = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderOutputLayer = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 subgroupBroadcastDynamicId = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceVulkan12Features>
|
|
{
|
|
using Type = PhysicalDeviceVulkan12Features;
|
|
};
|
|
|
|
struct PhysicalDeviceVulkan12Properties
|
|
{
|
|
using NativeType = VkPhysicalDeviceVulkan12Properties;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan12Properties;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Properties(
|
|
VULKAN_HPP_NAMESPACE::DriverId driverID_ = VULKAN_HPP_NAMESPACE::DriverId::eAmdProprietary,
|
|
std::array<char, VK_MAX_DRIVER_NAME_SIZE> const & driverName_ = {},
|
|
std::array<char, VK_MAX_DRIVER_INFO_SIZE> const & driverInfo_ = {},
|
|
VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion_ = {},
|
|
VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence_ = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly,
|
|
VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence_ = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly,
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64_ = {},
|
|
uint32_t maxUpdateAfterBindDescriptorsInAllPools_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod_ = {},
|
|
uint32_t maxPerStageDescriptorUpdateAfterBindSamplers_ = {},
|
|
uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers_ = {},
|
|
uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers_ = {},
|
|
uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages_ = {},
|
|
uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages_ = {},
|
|
uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments_ = {},
|
|
uint32_t maxPerStageUpdateAfterBindResources_ = {},
|
|
uint32_t maxDescriptorSetUpdateAfterBindSamplers_ = {},
|
|
uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers_ = {},
|
|
uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ = {},
|
|
uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers_ = {},
|
|
uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ = {},
|
|
uint32_t maxDescriptorSetUpdateAfterBindSampledImages_ = {},
|
|
uint32_t maxDescriptorSetUpdateAfterBindStorageImages_ = {},
|
|
uint32_t maxDescriptorSetUpdateAfterBindInputAttachments_ = {},
|
|
VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes_ = {},
|
|
VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 independentResolve_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping_ = {},
|
|
uint64_t maxTimelineSemaphoreValueDifference_ = {},
|
|
VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferIntegerColorSampleCounts_ = {},
|
|
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, driverID( driverID_ )
|
|
, driverName( driverName_ )
|
|
, driverInfo( driverInfo_ )
|
|
, conformanceVersion( conformanceVersion_ )
|
|
, denormBehaviorIndependence( denormBehaviorIndependence_ )
|
|
, roundingModeIndependence( roundingModeIndependence_ )
|
|
, shaderSignedZeroInfNanPreserveFloat16( shaderSignedZeroInfNanPreserveFloat16_ )
|
|
, shaderSignedZeroInfNanPreserveFloat32( shaderSignedZeroInfNanPreserveFloat32_ )
|
|
, shaderSignedZeroInfNanPreserveFloat64( shaderSignedZeroInfNanPreserveFloat64_ )
|
|
, shaderDenormPreserveFloat16( shaderDenormPreserveFloat16_ )
|
|
, shaderDenormPreserveFloat32( shaderDenormPreserveFloat32_ )
|
|
, shaderDenormPreserveFloat64( shaderDenormPreserveFloat64_ )
|
|
, shaderDenormFlushToZeroFloat16( shaderDenormFlushToZeroFloat16_ )
|
|
, shaderDenormFlushToZeroFloat32( shaderDenormFlushToZeroFloat32_ )
|
|
, shaderDenormFlushToZeroFloat64( shaderDenormFlushToZeroFloat64_ )
|
|
, shaderRoundingModeRTEFloat16( shaderRoundingModeRTEFloat16_ )
|
|
, shaderRoundingModeRTEFloat32( shaderRoundingModeRTEFloat32_ )
|
|
, shaderRoundingModeRTEFloat64( shaderRoundingModeRTEFloat64_ )
|
|
, shaderRoundingModeRTZFloat16( shaderRoundingModeRTZFloat16_ )
|
|
, shaderRoundingModeRTZFloat32( shaderRoundingModeRTZFloat32_ )
|
|
, shaderRoundingModeRTZFloat64( shaderRoundingModeRTZFloat64_ )
|
|
, maxUpdateAfterBindDescriptorsInAllPools( maxUpdateAfterBindDescriptorsInAllPools_ )
|
|
, shaderUniformBufferArrayNonUniformIndexingNative( shaderUniformBufferArrayNonUniformIndexingNative_ )
|
|
, shaderSampledImageArrayNonUniformIndexingNative( shaderSampledImageArrayNonUniformIndexingNative_ )
|
|
, shaderStorageBufferArrayNonUniformIndexingNative( shaderStorageBufferArrayNonUniformIndexingNative_ )
|
|
, shaderStorageImageArrayNonUniformIndexingNative( shaderStorageImageArrayNonUniformIndexingNative_ )
|
|
, shaderInputAttachmentArrayNonUniformIndexingNative( shaderInputAttachmentArrayNonUniformIndexingNative_ )
|
|
, robustBufferAccessUpdateAfterBind( robustBufferAccessUpdateAfterBind_ )
|
|
, quadDivergentImplicitLod( quadDivergentImplicitLod_ )
|
|
, maxPerStageDescriptorUpdateAfterBindSamplers( maxPerStageDescriptorUpdateAfterBindSamplers_ )
|
|
, maxPerStageDescriptorUpdateAfterBindUniformBuffers( maxPerStageDescriptorUpdateAfterBindUniformBuffers_ )
|
|
, maxPerStageDescriptorUpdateAfterBindStorageBuffers( maxPerStageDescriptorUpdateAfterBindStorageBuffers_ )
|
|
, maxPerStageDescriptorUpdateAfterBindSampledImages( maxPerStageDescriptorUpdateAfterBindSampledImages_ )
|
|
, maxPerStageDescriptorUpdateAfterBindStorageImages( maxPerStageDescriptorUpdateAfterBindStorageImages_ )
|
|
, maxPerStageDescriptorUpdateAfterBindInputAttachments( maxPerStageDescriptorUpdateAfterBindInputAttachments_ )
|
|
, maxPerStageUpdateAfterBindResources( maxPerStageUpdateAfterBindResources_ )
|
|
, maxDescriptorSetUpdateAfterBindSamplers( maxDescriptorSetUpdateAfterBindSamplers_ )
|
|
, maxDescriptorSetUpdateAfterBindUniformBuffers( maxDescriptorSetUpdateAfterBindUniformBuffers_ )
|
|
, maxDescriptorSetUpdateAfterBindUniformBuffersDynamic( maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ )
|
|
, maxDescriptorSetUpdateAfterBindStorageBuffers( maxDescriptorSetUpdateAfterBindStorageBuffers_ )
|
|
, maxDescriptorSetUpdateAfterBindStorageBuffersDynamic( maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ )
|
|
, maxDescriptorSetUpdateAfterBindSampledImages( maxDescriptorSetUpdateAfterBindSampledImages_ )
|
|
, maxDescriptorSetUpdateAfterBindStorageImages( maxDescriptorSetUpdateAfterBindStorageImages_ )
|
|
, maxDescriptorSetUpdateAfterBindInputAttachments( maxDescriptorSetUpdateAfterBindInputAttachments_ )
|
|
, supportedDepthResolveModes( supportedDepthResolveModes_ )
|
|
, supportedStencilResolveModes( supportedStencilResolveModes_ )
|
|
, independentResolveNone( independentResolveNone_ )
|
|
, independentResolve( independentResolve_ )
|
|
, filterMinmaxSingleComponentFormats( filterMinmaxSingleComponentFormats_ )
|
|
, filterMinmaxImageComponentMapping( filterMinmaxImageComponentMapping_ )
|
|
, maxTimelineSemaphoreValueDifference( maxTimelineSemaphoreValueDifference_ )
|
|
, framebufferIntegerColorSampleCounts( framebufferIntegerColorSampleCounts_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Properties( PhysicalDeviceVulkan12Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceVulkan12Properties( VkPhysicalDeviceVulkan12Properties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceVulkan12Properties( *reinterpret_cast<PhysicalDeviceVulkan12Properties const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
PhysicalDeviceVulkan12Properties(
|
|
VULKAN_HPP_NAMESPACE::DriverId driverID_,
|
|
std::string const & driverName_,
|
|
std::string const & driverInfo_ = {},
|
|
VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion_ = {},
|
|
VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence_ = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly,
|
|
VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence_ = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly,
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64_ = {},
|
|
uint32_t maxUpdateAfterBindDescriptorsInAllPools_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod_ = {},
|
|
uint32_t maxPerStageDescriptorUpdateAfterBindSamplers_ = {},
|
|
uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers_ = {},
|
|
uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers_ = {},
|
|
uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages_ = {},
|
|
uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages_ = {},
|
|
uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments_ = {},
|
|
uint32_t maxPerStageUpdateAfterBindResources_ = {},
|
|
uint32_t maxDescriptorSetUpdateAfterBindSamplers_ = {},
|
|
uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers_ = {},
|
|
uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ = {},
|
|
uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers_ = {},
|
|
uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ = {},
|
|
uint32_t maxDescriptorSetUpdateAfterBindSampledImages_ = {},
|
|
uint32_t maxDescriptorSetUpdateAfterBindStorageImages_ = {},
|
|
uint32_t maxDescriptorSetUpdateAfterBindInputAttachments_ = {},
|
|
VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes_ = {},
|
|
VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 independentResolve_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping_ = {},
|
|
uint64_t maxTimelineSemaphoreValueDifference_ = {},
|
|
VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferIntegerColorSampleCounts_ = {},
|
|
void * pNext_ = nullptr )
|
|
: pNext( pNext_ )
|
|
, driverID( driverID_ )
|
|
, conformanceVersion( conformanceVersion_ )
|
|
, denormBehaviorIndependence( denormBehaviorIndependence_ )
|
|
, roundingModeIndependence( roundingModeIndependence_ )
|
|
, shaderSignedZeroInfNanPreserveFloat16( shaderSignedZeroInfNanPreserveFloat16_ )
|
|
, shaderSignedZeroInfNanPreserveFloat32( shaderSignedZeroInfNanPreserveFloat32_ )
|
|
, shaderSignedZeroInfNanPreserveFloat64( shaderSignedZeroInfNanPreserveFloat64_ )
|
|
, shaderDenormPreserveFloat16( shaderDenormPreserveFloat16_ )
|
|
, shaderDenormPreserveFloat32( shaderDenormPreserveFloat32_ )
|
|
, shaderDenormPreserveFloat64( shaderDenormPreserveFloat64_ )
|
|
, shaderDenormFlushToZeroFloat16( shaderDenormFlushToZeroFloat16_ )
|
|
, shaderDenormFlushToZeroFloat32( shaderDenormFlushToZeroFloat32_ )
|
|
, shaderDenormFlushToZeroFloat64( shaderDenormFlushToZeroFloat64_ )
|
|
, shaderRoundingModeRTEFloat16( shaderRoundingModeRTEFloat16_ )
|
|
, shaderRoundingModeRTEFloat32( shaderRoundingModeRTEFloat32_ )
|
|
, shaderRoundingModeRTEFloat64( shaderRoundingModeRTEFloat64_ )
|
|
, shaderRoundingModeRTZFloat16( shaderRoundingModeRTZFloat16_ )
|
|
, shaderRoundingModeRTZFloat32( shaderRoundingModeRTZFloat32_ )
|
|
, shaderRoundingModeRTZFloat64( shaderRoundingModeRTZFloat64_ )
|
|
, maxUpdateAfterBindDescriptorsInAllPools( maxUpdateAfterBindDescriptorsInAllPools_ )
|
|
, shaderUniformBufferArrayNonUniformIndexingNative( shaderUniformBufferArrayNonUniformIndexingNative_ )
|
|
, shaderSampledImageArrayNonUniformIndexingNative( shaderSampledImageArrayNonUniformIndexingNative_ )
|
|
, shaderStorageBufferArrayNonUniformIndexingNative( shaderStorageBufferArrayNonUniformIndexingNative_ )
|
|
, shaderStorageImageArrayNonUniformIndexingNative( shaderStorageImageArrayNonUniformIndexingNative_ )
|
|
, shaderInputAttachmentArrayNonUniformIndexingNative( shaderInputAttachmentArrayNonUniformIndexingNative_ )
|
|
, robustBufferAccessUpdateAfterBind( robustBufferAccessUpdateAfterBind_ )
|
|
, quadDivergentImplicitLod( quadDivergentImplicitLod_ )
|
|
, maxPerStageDescriptorUpdateAfterBindSamplers( maxPerStageDescriptorUpdateAfterBindSamplers_ )
|
|
, maxPerStageDescriptorUpdateAfterBindUniformBuffers( maxPerStageDescriptorUpdateAfterBindUniformBuffers_ )
|
|
, maxPerStageDescriptorUpdateAfterBindStorageBuffers( maxPerStageDescriptorUpdateAfterBindStorageBuffers_ )
|
|
, maxPerStageDescriptorUpdateAfterBindSampledImages( maxPerStageDescriptorUpdateAfterBindSampledImages_ )
|
|
, maxPerStageDescriptorUpdateAfterBindStorageImages( maxPerStageDescriptorUpdateAfterBindStorageImages_ )
|
|
, maxPerStageDescriptorUpdateAfterBindInputAttachments( maxPerStageDescriptorUpdateAfterBindInputAttachments_ )
|
|
, maxPerStageUpdateAfterBindResources( maxPerStageUpdateAfterBindResources_ )
|
|
, maxDescriptorSetUpdateAfterBindSamplers( maxDescriptorSetUpdateAfterBindSamplers_ )
|
|
, maxDescriptorSetUpdateAfterBindUniformBuffers( maxDescriptorSetUpdateAfterBindUniformBuffers_ )
|
|
, maxDescriptorSetUpdateAfterBindUniformBuffersDynamic( maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ )
|
|
, maxDescriptorSetUpdateAfterBindStorageBuffers( maxDescriptorSetUpdateAfterBindStorageBuffers_ )
|
|
, maxDescriptorSetUpdateAfterBindStorageBuffersDynamic( maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ )
|
|
, maxDescriptorSetUpdateAfterBindSampledImages( maxDescriptorSetUpdateAfterBindSampledImages_ )
|
|
, maxDescriptorSetUpdateAfterBindStorageImages( maxDescriptorSetUpdateAfterBindStorageImages_ )
|
|
, maxDescriptorSetUpdateAfterBindInputAttachments( maxDescriptorSetUpdateAfterBindInputAttachments_ )
|
|
, supportedDepthResolveModes( supportedDepthResolveModes_ )
|
|
, supportedStencilResolveModes( supportedStencilResolveModes_ )
|
|
, independentResolveNone( independentResolveNone_ )
|
|
, independentResolve( independentResolve_ )
|
|
, filterMinmaxSingleComponentFormats( filterMinmaxSingleComponentFormats_ )
|
|
, filterMinmaxImageComponentMapping( filterMinmaxImageComponentMapping_ )
|
|
, maxTimelineSemaphoreValueDifference( maxTimelineSemaphoreValueDifference_ )
|
|
, framebufferIntegerColorSampleCounts( framebufferIntegerColorSampleCounts_ )
|
|
{
|
|
VULKAN_HPP_ASSERT( driverName_.size() < VK_MAX_DRIVER_NAME_SIZE );
|
|
# if defined( WIN32 )
|
|
strncpy_s( driverName, VK_MAX_DRIVER_NAME_SIZE, driverName_.data(), driverName_.size() );
|
|
# else
|
|
strncpy( driverName, driverName_.data(), std::min<size_t>( VK_MAX_DRIVER_NAME_SIZE, driverName_.size() ) );
|
|
# endif
|
|
|
|
VULKAN_HPP_ASSERT( driverInfo_.size() < VK_MAX_DRIVER_INFO_SIZE );
|
|
# if defined( WIN32 )
|
|
strncpy_s( driverInfo, VK_MAX_DRIVER_INFO_SIZE, driverInfo_.data(), driverInfo_.size() );
|
|
# else
|
|
strncpy( driverInfo, driverInfo_.data(), std::min<size_t>( VK_MAX_DRIVER_INFO_SIZE, driverInfo_.size() ) );
|
|
# endif
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
PhysicalDeviceVulkan12Properties & operator=( PhysicalDeviceVulkan12Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceVulkan12Properties & operator=( VkPhysicalDeviceVulkan12Properties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Properties const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
operator VkPhysicalDeviceVulkan12Properties const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceVulkan12Properties *>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceVulkan12Properties &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceVulkan12Properties *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
void * const &,
|
|
VULKAN_HPP_NAMESPACE::DriverId const &,
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DRIVER_NAME_SIZE> const &,
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DRIVER_INFO_SIZE> const &,
|
|
VULKAN_HPP_NAMESPACE::ConformanceVersion const &,
|
|
VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence const &,
|
|
VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
uint32_t const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
VULKAN_HPP_NAMESPACE::ResolveModeFlags const &,
|
|
VULKAN_HPP_NAMESPACE::ResolveModeFlags const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
uint64_t const &,
|
|
VULKAN_HPP_NAMESPACE::SampleCountFlags const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType,
|
|
pNext,
|
|
driverID,
|
|
driverName,
|
|
driverInfo,
|
|
conformanceVersion,
|
|
denormBehaviorIndependence,
|
|
roundingModeIndependence,
|
|
shaderSignedZeroInfNanPreserveFloat16,
|
|
shaderSignedZeroInfNanPreserveFloat32,
|
|
shaderSignedZeroInfNanPreserveFloat64,
|
|
shaderDenormPreserveFloat16,
|
|
shaderDenormPreserveFloat32,
|
|
shaderDenormPreserveFloat64,
|
|
shaderDenormFlushToZeroFloat16,
|
|
shaderDenormFlushToZeroFloat32,
|
|
shaderDenormFlushToZeroFloat64,
|
|
shaderRoundingModeRTEFloat16,
|
|
shaderRoundingModeRTEFloat32,
|
|
shaderRoundingModeRTEFloat64,
|
|
shaderRoundingModeRTZFloat16,
|
|
shaderRoundingModeRTZFloat32,
|
|
shaderRoundingModeRTZFloat64,
|
|
maxUpdateAfterBindDescriptorsInAllPools,
|
|
shaderUniformBufferArrayNonUniformIndexingNative,
|
|
shaderSampledImageArrayNonUniformIndexingNative,
|
|
shaderStorageBufferArrayNonUniformIndexingNative,
|
|
shaderStorageImageArrayNonUniformIndexingNative,
|
|
shaderInputAttachmentArrayNonUniformIndexingNative,
|
|
robustBufferAccessUpdateAfterBind,
|
|
quadDivergentImplicitLod,
|
|
maxPerStageDescriptorUpdateAfterBindSamplers,
|
|
maxPerStageDescriptorUpdateAfterBindUniformBuffers,
|
|
maxPerStageDescriptorUpdateAfterBindStorageBuffers,
|
|
maxPerStageDescriptorUpdateAfterBindSampledImages,
|
|
maxPerStageDescriptorUpdateAfterBindStorageImages,
|
|
maxPerStageDescriptorUpdateAfterBindInputAttachments,
|
|
maxPerStageUpdateAfterBindResources,
|
|
maxDescriptorSetUpdateAfterBindSamplers,
|
|
maxDescriptorSetUpdateAfterBindUniformBuffers,
|
|
maxDescriptorSetUpdateAfterBindUniformBuffersDynamic,
|
|
maxDescriptorSetUpdateAfterBindStorageBuffers,
|
|
maxDescriptorSetUpdateAfterBindStorageBuffersDynamic,
|
|
maxDescriptorSetUpdateAfterBindSampledImages,
|
|
maxDescriptorSetUpdateAfterBindStorageImages,
|
|
maxDescriptorSetUpdateAfterBindInputAttachments,
|
|
supportedDepthResolveModes,
|
|
supportedStencilResolveModes,
|
|
independentResolveNone,
|
|
independentResolve,
|
|
filterMinmaxSingleComponentFormats,
|
|
filterMinmaxImageComponentMapping,
|
|
maxTimelineSemaphoreValueDifference,
|
|
framebufferIntegerColorSampleCounts );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
std::strong_ordering operator<=>( PhysicalDeviceVulkan12Properties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
|
|
return cmp;
|
|
if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
|
|
return cmp;
|
|
if ( auto cmp = driverID <=> rhs.driverID; cmp != 0 )
|
|
return cmp;
|
|
if ( auto cmp = strcmp( driverName, rhs.driverName ); cmp != 0 )
|
|
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
|
|
if ( auto cmp = strcmp( driverInfo, rhs.driverInfo ); cmp != 0 )
|
|
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
|
|
if ( auto cmp = conformanceVersion <=> rhs.conformanceVersion; cmp != 0 )
|
|
return cmp;
|
|
if ( auto cmp = denormBehaviorIndependence <=> rhs.denormBehaviorIndependence; cmp != 0 )
|
|
return cmp;
|
|
if ( auto cmp = roundingModeIndependence <=> rhs.roundingModeIndependence; cmp != 0 )
|
|
return cmp;
|
|
if ( auto cmp = shaderSignedZeroInfNanPreserveFloat16 <=> rhs.shaderSignedZeroInfNanPreserveFloat16; cmp != 0 )
|
|
return cmp;
|
|
if ( auto cmp = shaderSignedZeroInfNanPreserveFloat32 <=> rhs.shaderSignedZeroInfNanPreserveFloat32; cmp != 0 )
|
|
return cmp;
|
|
if ( auto cmp = shaderSignedZeroInfNanPreserveFloat64 <=> rhs.shaderSignedZeroInfNanPreserveFloat64; cmp != 0 )
|
|
return cmp;
|
|
if ( auto cmp = shaderDenormPreserveFloat16 <=> rhs.shaderDenormPreserveFloat16; cmp != 0 )
|
|
return cmp;
|
|
if ( auto cmp = shaderDenormPreserveFloat32 <=> rhs.shaderDenormPreserveFloat32; cmp != 0 )
|
|
return cmp;
|
|
if ( auto cmp = shaderDenormPreserveFloat64 <=> rhs.shaderDenormPreserveFloat64; cmp != 0 )
|
|
return cmp;
|
|
if ( auto cmp = shaderDenormFlushToZeroFloat16 <=> rhs.shaderDenormFlushToZeroFloat16; cmp != 0 )
|
|
return cmp;
|
|
if ( auto cmp = shaderDenormFlushToZeroFloat32 <=> rhs.shaderDenormFlushToZeroFloat32; cmp != 0 )
|
|
return cmp;
|
|
if ( auto cmp = shaderDenormFlushToZeroFloat64 <=> rhs.shaderDenormFlushToZeroFloat64; cmp != 0 )
|
|
return cmp;
|
|
if ( auto cmp = shaderRoundingModeRTEFloat16 <=> rhs.shaderRoundingModeRTEFloat16; cmp != 0 )
|
|
return cmp;
|
|
if ( auto cmp = shaderRoundingModeRTEFloat32 <=> rhs.shaderRoundingModeRTEFloat32; cmp != 0 )
|
|
return cmp;
|
|
if ( auto cmp = shaderRoundingModeRTEFloat64 <=> rhs.shaderRoundingModeRTEFloat64; cmp != 0 )
|
|
return cmp;
|
|
if ( auto cmp = shaderRoundingModeRTZFloat16 <=> rhs.shaderRoundingModeRTZFloat16; cmp != 0 )
|
|
return cmp;
|
|
if ( auto cmp = shaderRoundingModeRTZFloat32 <=> rhs.shaderRoundingModeRTZFloat32; cmp != 0 )
|
|
return cmp;
|
|
if ( auto cmp = shaderRoundingModeRTZFloat64 <=> rhs.shaderRoundingModeRTZFloat64; cmp != 0 )
|
|
return cmp;
|
|
if ( auto cmp = maxUpdateAfterBindDescriptorsInAllPools <=> rhs.maxUpdateAfterBindDescriptorsInAllPools; cmp != 0 )
|
|
return cmp;
|
|
if ( auto cmp = shaderUniformBufferArrayNonUniformIndexingNative <=> rhs.shaderUniformBufferArrayNonUniformIndexingNative; cmp != 0 )
|
|
return cmp;
|
|
if ( auto cmp = shaderSampledImageArrayNonUniformIndexingNative <=> rhs.shaderSampledImageArrayNonUniformIndexingNative; cmp != 0 )
|
|
return cmp;
|
|
if ( auto cmp = shaderStorageBufferArrayNonUniformIndexingNative <=> rhs.shaderStorageBufferArrayNonUniformIndexingNative; cmp != 0 )
|
|
return cmp;
|
|
if ( auto cmp = shaderStorageImageArrayNonUniformIndexingNative <=> rhs.shaderStorageImageArrayNonUniformIndexingNative; cmp != 0 )
|
|
return cmp;
|
|
if ( auto cmp = shaderInputAttachmentArrayNonUniformIndexingNative <=> rhs.shaderInputAttachmentArrayNonUniformIndexingNative; cmp != 0 )
|
|
return cmp;
|
|
if ( auto cmp = robustBufferAccessUpdateAfterBind <=> rhs.robustBufferAccessUpdateAfterBind; cmp != 0 )
|
|
return cmp;
|
|
if ( auto cmp = quadDivergentImplicitLod <=> rhs.quadDivergentImplicitLod; cmp != 0 )
|
|
return cmp;
|
|
if ( auto cmp = maxPerStageDescriptorUpdateAfterBindSamplers <=> rhs.maxPerStageDescriptorUpdateAfterBindSamplers; cmp != 0 )
|
|
return cmp;
|
|
if ( auto cmp = maxPerStageDescriptorUpdateAfterBindUniformBuffers <=> rhs.maxPerStageDescriptorUpdateAfterBindUniformBuffers; cmp != 0 )
|
|
return cmp;
|
|
if ( auto cmp = maxPerStageDescriptorUpdateAfterBindStorageBuffers <=> rhs.maxPerStageDescriptorUpdateAfterBindStorageBuffers; cmp != 0 )
|
|
return cmp;
|
|
if ( auto cmp = maxPerStageDescriptorUpdateAfterBindSampledImages <=> rhs.maxPerStageDescriptorUpdateAfterBindSampledImages; cmp != 0 )
|
|
return cmp;
|
|
if ( auto cmp = maxPerStageDescriptorUpdateAfterBindStorageImages <=> rhs.maxPerStageDescriptorUpdateAfterBindStorageImages; cmp != 0 )
|
|
return cmp;
|
|
if ( auto cmp = maxPerStageDescriptorUpdateAfterBindInputAttachments <=> rhs.maxPerStageDescriptorUpdateAfterBindInputAttachments; cmp != 0 )
|
|
return cmp;
|
|
if ( auto cmp = maxPerStageUpdateAfterBindResources <=> rhs.maxPerStageUpdateAfterBindResources; cmp != 0 )
|
|
return cmp;
|
|
if ( auto cmp = maxDescriptorSetUpdateAfterBindSamplers <=> rhs.maxDescriptorSetUpdateAfterBindSamplers; cmp != 0 )
|
|
return cmp;
|
|
if ( auto cmp = maxDescriptorSetUpdateAfterBindUniformBuffers <=> rhs.maxDescriptorSetUpdateAfterBindUniformBuffers; cmp != 0 )
|
|
return cmp;
|
|
if ( auto cmp = maxDescriptorSetUpdateAfterBindUniformBuffersDynamic <=> rhs.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic; cmp != 0 )
|
|
return cmp;
|
|
if ( auto cmp = maxDescriptorSetUpdateAfterBindStorageBuffers <=> rhs.maxDescriptorSetUpdateAfterBindStorageBuffers; cmp != 0 )
|
|
return cmp;
|
|
if ( auto cmp = maxDescriptorSetUpdateAfterBindStorageBuffersDynamic <=> rhs.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic; cmp != 0 )
|
|
return cmp;
|
|
if ( auto cmp = maxDescriptorSetUpdateAfterBindSampledImages <=> rhs.maxDescriptorSetUpdateAfterBindSampledImages; cmp != 0 )
|
|
return cmp;
|
|
if ( auto cmp = maxDescriptorSetUpdateAfterBindStorageImages <=> rhs.maxDescriptorSetUpdateAfterBindStorageImages; cmp != 0 )
|
|
return cmp;
|
|
if ( auto cmp = maxDescriptorSetUpdateAfterBindInputAttachments <=> rhs.maxDescriptorSetUpdateAfterBindInputAttachments; cmp != 0 )
|
|
return cmp;
|
|
if ( auto cmp = supportedDepthResolveModes <=> rhs.supportedDepthResolveModes; cmp != 0 )
|
|
return cmp;
|
|
if ( auto cmp = supportedStencilResolveModes <=> rhs.supportedStencilResolveModes; cmp != 0 )
|
|
return cmp;
|
|
if ( auto cmp = independentResolveNone <=> rhs.independentResolveNone; cmp != 0 )
|
|
return cmp;
|
|
if ( auto cmp = independentResolve <=> rhs.independentResolve; cmp != 0 )
|
|
return cmp;
|
|
if ( auto cmp = filterMinmaxSingleComponentFormats <=> rhs.filterMinmaxSingleComponentFormats; cmp != 0 )
|
|
return cmp;
|
|
if ( auto cmp = filterMinmaxImageComponentMapping <=> rhs.filterMinmaxImageComponentMapping; cmp != 0 )
|
|
return cmp;
|
|
      if ( auto cmp = maxTimelineSemaphoreValueDifference <=> rhs.maxTimelineSemaphoreValueDifference; cmp != 0 )
        return cmp;
      if ( auto cmp = framebufferIntegerColorSampleCounts <=> rhs.framebufferIntegerColorSampleCounts; cmp != 0 )
        return cmp;

      return std::strong_ordering::equivalent;
    }
#endif

    bool operator==( PhysicalDeviceVulkan12Properties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( driverID == rhs.driverID ) && ( strcmp( driverName, rhs.driverName ) == 0 ) &&
             ( strcmp( driverInfo, rhs.driverInfo ) == 0 ) && ( conformanceVersion == rhs.conformanceVersion ) &&
             ( denormBehaviorIndependence == rhs.denormBehaviorIndependence ) && ( roundingModeIndependence == rhs.roundingModeIndependence ) &&
             ( shaderSignedZeroInfNanPreserveFloat16 == rhs.shaderSignedZeroInfNanPreserveFloat16 ) &&
             ( shaderSignedZeroInfNanPreserveFloat32 == rhs.shaderSignedZeroInfNanPreserveFloat32 ) &&
             ( shaderSignedZeroInfNanPreserveFloat64 == rhs.shaderSignedZeroInfNanPreserveFloat64 ) &&
             ( shaderDenormPreserveFloat16 == rhs.shaderDenormPreserveFloat16 ) && ( shaderDenormPreserveFloat32 == rhs.shaderDenormPreserveFloat32 ) &&
             ( shaderDenormPreserveFloat64 == rhs.shaderDenormPreserveFloat64 ) && ( shaderDenormFlushToZeroFloat16 == rhs.shaderDenormFlushToZeroFloat16 ) &&
             ( shaderDenormFlushToZeroFloat32 == rhs.shaderDenormFlushToZeroFloat32 ) &&
             ( shaderDenormFlushToZeroFloat64 == rhs.shaderDenormFlushToZeroFloat64 ) && ( shaderRoundingModeRTEFloat16 == rhs.shaderRoundingModeRTEFloat16 ) &&
             ( shaderRoundingModeRTEFloat32 == rhs.shaderRoundingModeRTEFloat32 ) && ( shaderRoundingModeRTEFloat64 == rhs.shaderRoundingModeRTEFloat64 ) &&
             ( shaderRoundingModeRTZFloat16 == rhs.shaderRoundingModeRTZFloat16 ) && ( shaderRoundingModeRTZFloat32 == rhs.shaderRoundingModeRTZFloat32 ) &&
             ( shaderRoundingModeRTZFloat64 == rhs.shaderRoundingModeRTZFloat64 ) &&
             ( maxUpdateAfterBindDescriptorsInAllPools == rhs.maxUpdateAfterBindDescriptorsInAllPools ) &&
             ( shaderUniformBufferArrayNonUniformIndexingNative == rhs.shaderUniformBufferArrayNonUniformIndexingNative ) &&
             ( shaderSampledImageArrayNonUniformIndexingNative == rhs.shaderSampledImageArrayNonUniformIndexingNative ) &&
             ( shaderStorageBufferArrayNonUniformIndexingNative == rhs.shaderStorageBufferArrayNonUniformIndexingNative ) &&
             ( shaderStorageImageArrayNonUniformIndexingNative == rhs.shaderStorageImageArrayNonUniformIndexingNative ) &&
             ( shaderInputAttachmentArrayNonUniformIndexingNative == rhs.shaderInputAttachmentArrayNonUniformIndexingNative ) &&
             ( robustBufferAccessUpdateAfterBind == rhs.robustBufferAccessUpdateAfterBind ) && ( quadDivergentImplicitLod == rhs.quadDivergentImplicitLod ) &&
             ( maxPerStageDescriptorUpdateAfterBindSamplers == rhs.maxPerStageDescriptorUpdateAfterBindSamplers ) &&
             ( maxPerStageDescriptorUpdateAfterBindUniformBuffers == rhs.maxPerStageDescriptorUpdateAfterBindUniformBuffers ) &&
             ( maxPerStageDescriptorUpdateAfterBindStorageBuffers == rhs.maxPerStageDescriptorUpdateAfterBindStorageBuffers ) &&
             ( maxPerStageDescriptorUpdateAfterBindSampledImages == rhs.maxPerStageDescriptorUpdateAfterBindSampledImages ) &&
             ( maxPerStageDescriptorUpdateAfterBindStorageImages == rhs.maxPerStageDescriptorUpdateAfterBindStorageImages ) &&
             ( maxPerStageDescriptorUpdateAfterBindInputAttachments == rhs.maxPerStageDescriptorUpdateAfterBindInputAttachments ) &&
             ( maxPerStageUpdateAfterBindResources == rhs.maxPerStageUpdateAfterBindResources ) &&
             ( maxDescriptorSetUpdateAfterBindSamplers == rhs.maxDescriptorSetUpdateAfterBindSamplers ) &&
             ( maxDescriptorSetUpdateAfterBindUniformBuffers == rhs.maxDescriptorSetUpdateAfterBindUniformBuffers ) &&
             ( maxDescriptorSetUpdateAfterBindUniformBuffersDynamic == rhs.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic ) &&
             ( maxDescriptorSetUpdateAfterBindStorageBuffers == rhs.maxDescriptorSetUpdateAfterBindStorageBuffers ) &&
             ( maxDescriptorSetUpdateAfterBindStorageBuffersDynamic == rhs.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic ) &&
             ( maxDescriptorSetUpdateAfterBindSampledImages == rhs.maxDescriptorSetUpdateAfterBindSampledImages ) &&
             ( maxDescriptorSetUpdateAfterBindStorageImages == rhs.maxDescriptorSetUpdateAfterBindStorageImages ) &&
             ( maxDescriptorSetUpdateAfterBindInputAttachments == rhs.maxDescriptorSetUpdateAfterBindInputAttachments ) &&
             ( supportedDepthResolveModes == rhs.supportedDepthResolveModes ) && ( supportedStencilResolveModes == rhs.supportedStencilResolveModes ) &&
             ( independentResolveNone == rhs.independentResolveNone ) && ( independentResolve == rhs.independentResolve ) &&
             ( filterMinmaxSingleComponentFormats == rhs.filterMinmaxSingleComponentFormats ) &&
             ( filterMinmaxImageComponentMapping == rhs.filterMinmaxImageComponentMapping ) &&
             ( maxTimelineSemaphoreValueDifference == rhs.maxTimelineSemaphoreValueDifference ) &&
             ( framebufferIntegerColorSampleCounts == rhs.framebufferIntegerColorSampleCounts );
    }

    bool operator!=( PhysicalDeviceVulkan12Properties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan12Properties;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::DriverId driverID = VULKAN_HPP_NAMESPACE::DriverId::eAmdProprietary;
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DRIVER_NAME_SIZE> driverName = {};
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DRIVER_INFO_SIZE> driverInfo = {};
    VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion = {};
    VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly;
    VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly;
    VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16 = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32 = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64 = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16 = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32 = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64 = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16 = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32 = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64 = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16 = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32 = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64 = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16 = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32 = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64 = {};
    uint32_t maxUpdateAfterBindDescriptorsInAllPools = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative = {};
    VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind = {};
    VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod = {};
    uint32_t maxPerStageDescriptorUpdateAfterBindSamplers = {};
    uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers = {};
    uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers = {};
    uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages = {};
    uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages = {};
    uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments = {};
    uint32_t maxPerStageUpdateAfterBindResources = {};
    uint32_t maxDescriptorSetUpdateAfterBindSamplers = {};
    uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers = {};
    uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic = {};
    uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers = {};
    uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic = {};
    uint32_t maxDescriptorSetUpdateAfterBindSampledImages = {};
    uint32_t maxDescriptorSetUpdateAfterBindStorageImages = {};
    uint32_t maxDescriptorSetUpdateAfterBindInputAttachments = {};
    VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes = {};
    VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes = {};
    VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone = {};
    VULKAN_HPP_NAMESPACE::Bool32 independentResolve = {};
    VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats = {};
    VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping = {};
    uint64_t maxTimelineSemaphoreValueDifference = {};
    VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferIntegerColorSampleCounts = {};
  };

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceVulkan12Properties>
  {
    using Type = PhysicalDeviceVulkan12Properties;
  };

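  // Note: illustrative usage sketch, not part of the generated registry output.
  // PhysicalDeviceVulkan12Properties is a read-only properties struct that the implementation fills
  // in when it is chained into a PhysicalDeviceProperties2 query. The snippet assumes a valid
  // vk::PhysicalDevice named `physicalDevice`; all names are placeholders.
  //
  //   vk::PhysicalDeviceProperties2        props2;
  //   vk::PhysicalDeviceVulkan12Properties vulkan12Props;
  //   props2.pNext = &vulkan12Props;
  //   physicalDevice.getProperties2( &props2 );
  //   // vulkan12Props.driverName, vulkan12Props.conformanceVersion, ... now hold the device values.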
  struct PhysicalDeviceVulkan13Features
  {
    using NativeType = VkPhysicalDeviceVulkan13Features;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan13Features;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan13Features( VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess_ = {},
                                                         VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock_ = {},
                                                         VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind_ = {},
                                                         VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl_ = {},
                                                         VULKAN_HPP_NAMESPACE::Bool32 privateData_ = {},
                                                         VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation_ = {},
                                                         VULKAN_HPP_NAMESPACE::Bool32 shaderTerminateInvocation_ = {},
                                                         VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl_ = {},
                                                         VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups_ = {},
                                                         VULKAN_HPP_NAMESPACE::Bool32 synchronization2_ = {},
                                                         VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR_ = {},
                                                         VULKAN_HPP_NAMESPACE::Bool32 shaderZeroInitializeWorkgroupMemory_ = {},
                                                         VULKAN_HPP_NAMESPACE::Bool32 dynamicRendering_ = {},
                                                         VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerDotProduct_ = {},
                                                         VULKAN_HPP_NAMESPACE::Bool32 maintenance4_ = {},
                                                         void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , robustImageAccess( robustImageAccess_ )
      , inlineUniformBlock( inlineUniformBlock_ )
      , descriptorBindingInlineUniformBlockUpdateAfterBind( descriptorBindingInlineUniformBlockUpdateAfterBind_ )
      , pipelineCreationCacheControl( pipelineCreationCacheControl_ )
      , privateData( privateData_ )
      , shaderDemoteToHelperInvocation( shaderDemoteToHelperInvocation_ )
      , shaderTerminateInvocation( shaderTerminateInvocation_ )
      , subgroupSizeControl( subgroupSizeControl_ )
      , computeFullSubgroups( computeFullSubgroups_ )
      , synchronization2( synchronization2_ )
      , textureCompressionASTC_HDR( textureCompressionASTC_HDR_ )
      , shaderZeroInitializeWorkgroupMemory( shaderZeroInitializeWorkgroupMemory_ )
      , dynamicRendering( dynamicRendering_ )
      , shaderIntegerDotProduct( shaderIntegerDotProduct_ )
      , maintenance4( maintenance4_ )
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan13Features( PhysicalDeviceVulkan13Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceVulkan13Features( VkPhysicalDeviceVulkan13Features const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceVulkan13Features( *reinterpret_cast<PhysicalDeviceVulkan13Features const *>( &rhs ) )
    {
    }

    PhysicalDeviceVulkan13Features & operator=( PhysicalDeviceVulkan13Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceVulkan13Features & operator=( VkPhysicalDeviceVulkan13Features const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan13Features const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setRobustImageAccess( VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess_ ) VULKAN_HPP_NOEXCEPT
    {
      robustImageAccess = robustImageAccess_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setInlineUniformBlock( VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock_ ) VULKAN_HPP_NOEXCEPT
    {
      inlineUniformBlock = inlineUniformBlock_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setDescriptorBindingInlineUniformBlockUpdateAfterBind(
      VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorBindingInlineUniformBlockUpdateAfterBind = descriptorBindingInlineUniformBlockUpdateAfterBind_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features &
      setPipelineCreationCacheControl( VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl_ ) VULKAN_HPP_NOEXCEPT
    {
      pipelineCreationCacheControl = pipelineCreationCacheControl_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setPrivateData( VULKAN_HPP_NAMESPACE::Bool32 privateData_ ) VULKAN_HPP_NOEXCEPT
    {
      privateData = privateData_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features &
      setShaderDemoteToHelperInvocation( VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderDemoteToHelperInvocation = shaderDemoteToHelperInvocation_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features &
      setShaderTerminateInvocation( VULKAN_HPP_NAMESPACE::Bool32 shaderTerminateInvocation_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderTerminateInvocation = shaderTerminateInvocation_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setSubgroupSizeControl( VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl_ ) VULKAN_HPP_NOEXCEPT
    {
      subgroupSizeControl = subgroupSizeControl_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setComputeFullSubgroups( VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups_ ) VULKAN_HPP_NOEXCEPT
    {
      computeFullSubgroups = computeFullSubgroups_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setSynchronization2( VULKAN_HPP_NAMESPACE::Bool32 synchronization2_ ) VULKAN_HPP_NOEXCEPT
    {
      synchronization2 = synchronization2_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features &
      setTextureCompressionASTC_HDR( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR_ ) VULKAN_HPP_NOEXCEPT
    {
      textureCompressionASTC_HDR = textureCompressionASTC_HDR_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features &
      setShaderZeroInitializeWorkgroupMemory( VULKAN_HPP_NAMESPACE::Bool32 shaderZeroInitializeWorkgroupMemory_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderZeroInitializeWorkgroupMemory = shaderZeroInitializeWorkgroupMemory_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setDynamicRendering( VULKAN_HPP_NAMESPACE::Bool32 dynamicRendering_ ) VULKAN_HPP_NOEXCEPT
    {
      dynamicRendering = dynamicRendering_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features &
      setShaderIntegerDotProduct( VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerDotProduct_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderIntegerDotProduct = shaderIntegerDotProduct_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setMaintenance4( VULKAN_HPP_NAMESPACE::Bool32 maintenance4_ ) VULKAN_HPP_NOEXCEPT
    {
      maintenance4 = maintenance4_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkPhysicalDeviceVulkan13Features const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceVulkan13Features *>( this );
    }

    operator VkPhysicalDeviceVulkan13Features &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceVulkan13Features *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#  if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#  else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
               void * const &,
               VULKAN_HPP_NAMESPACE::Bool32 const &,
               VULKAN_HPP_NAMESPACE::Bool32 const &,
               VULKAN_HPP_NAMESPACE::Bool32 const &,
               VULKAN_HPP_NAMESPACE::Bool32 const &,
               VULKAN_HPP_NAMESPACE::Bool32 const &,
               VULKAN_HPP_NAMESPACE::Bool32 const &,
               VULKAN_HPP_NAMESPACE::Bool32 const &,
               VULKAN_HPP_NAMESPACE::Bool32 const &,
               VULKAN_HPP_NAMESPACE::Bool32 const &,
               VULKAN_HPP_NAMESPACE::Bool32 const &,
               VULKAN_HPP_NAMESPACE::Bool32 const &,
               VULKAN_HPP_NAMESPACE::Bool32 const &,
               VULKAN_HPP_NAMESPACE::Bool32 const &,
               VULKAN_HPP_NAMESPACE::Bool32 const &,
               VULKAN_HPP_NAMESPACE::Bool32 const &>
#  endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       robustImageAccess,
                       inlineUniformBlock,
                       descriptorBindingInlineUniformBlockUpdateAfterBind,
                       pipelineCreationCacheControl,
                       privateData,
                       shaderDemoteToHelperInvocation,
                       shaderTerminateInvocation,
                       subgroupSizeControl,
                       computeFullSubgroups,
                       synchronization2,
                       textureCompressionASTC_HDR,
                       shaderZeroInitializeWorkgroupMemory,
                       dynamicRendering,
                       shaderIntegerDotProduct,
                       maintenance4 );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceVulkan13Features const & ) const = default;
#else
    bool operator==( PhysicalDeviceVulkan13Features const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( robustImageAccess == rhs.robustImageAccess ) &&
             ( inlineUniformBlock == rhs.inlineUniformBlock ) &&
             ( descriptorBindingInlineUniformBlockUpdateAfterBind == rhs.descriptorBindingInlineUniformBlockUpdateAfterBind ) &&
             ( pipelineCreationCacheControl == rhs.pipelineCreationCacheControl ) && ( privateData == rhs.privateData ) &&
             ( shaderDemoteToHelperInvocation == rhs.shaderDemoteToHelperInvocation ) && ( shaderTerminateInvocation == rhs.shaderTerminateInvocation ) &&
             ( subgroupSizeControl == rhs.subgroupSizeControl ) && ( computeFullSubgroups == rhs.computeFullSubgroups ) &&
             ( synchronization2 == rhs.synchronization2 ) && ( textureCompressionASTC_HDR == rhs.textureCompressionASTC_HDR ) &&
             ( shaderZeroInitializeWorkgroupMemory == rhs.shaderZeroInitializeWorkgroupMemory ) && ( dynamicRendering == rhs.dynamicRendering ) &&
             ( shaderIntegerDotProduct == rhs.shaderIntegerDotProduct ) && ( maintenance4 == rhs.maintenance4 );
#  endif
    }

    bool operator!=( PhysicalDeviceVulkan13Features const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan13Features;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess = {};
    VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock = {};
    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind = {};
    VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl = {};
    VULKAN_HPP_NAMESPACE::Bool32 privateData = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderTerminateInvocation = {};
    VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl = {};
    VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups = {};
    VULKAN_HPP_NAMESPACE::Bool32 synchronization2 = {};
    VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderZeroInitializeWorkgroupMemory = {};
    VULKAN_HPP_NAMESPACE::Bool32 dynamicRendering = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerDotProduct = {};
    VULKAN_HPP_NAMESPACE::Bool32 maintenance4 = {};
  };

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceVulkan13Features>
  {
    using Type = PhysicalDeviceVulkan13Features;
  };

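  // Note: illustrative usage sketch, not part of the generated registry output.
  // Feature structs like PhysicalDeviceVulkan13Features are typically chained twice: into
  // PhysicalDeviceFeatures2 to query support, and into DeviceCreateInfo::pNext to enable the
  // features at device creation. Assumes a valid vk::PhysicalDevice `physicalDevice` and an
  // otherwise populated vk::DeviceCreateInfo `deviceCreateInfo`; both names are placeholders.
  //
  //   vk::PhysicalDeviceVulkan13Features vulkan13Features;
  //   vk::PhysicalDeviceFeatures2        features2;
  //   features2.pNext = &vulkan13Features;
  //   physicalDevice.getFeatures2( &features2 );              // query support
  //
  //   vulkan13Features.setDynamicRendering( VK_TRUE ).setSynchronization2( VK_TRUE );
  //   deviceCreateInfo.pNext = &vulkan13Features;             // enable on the new device
  //   vk::Device device = physicalDevice.createDevice( deviceCreateInfo );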
struct PhysicalDeviceVulkan13Properties
|
|
{
|
|
using NativeType = VkPhysicalDeviceVulkan13Properties;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan13Properties;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR
|
|
PhysicalDeviceVulkan13Properties( uint32_t minSubgroupSize_ = {},
|
|
uint32_t maxSubgroupSize_ = {},
|
|
uint32_t maxComputeWorkgroupSubgroups_ = {},
|
|
VULKAN_HPP_NAMESPACE::ShaderStageFlags requiredSubgroupSizeStages_ = {},
|
|
uint32_t maxInlineUniformBlockSize_ = {},
|
|
uint32_t maxPerStageDescriptorInlineUniformBlocks_ = {},
|
|
uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks_ = {},
|
|
uint32_t maxDescriptorSetInlineUniformBlocks_ = {},
|
|
uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks_ = {},
|
|
uint32_t maxInlineUniformTotalSize_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitUnsignedAccelerated_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitSignedAccelerated_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitMixedSignednessAccelerated_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedUnsignedAccelerated_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedSignedAccelerated_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedMixedSignednessAccelerated_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitUnsignedAccelerated_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitSignedAccelerated_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitMixedSignednessAccelerated_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitUnsignedAccelerated_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitSignedAccelerated_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitMixedSignednessAccelerated_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitUnsignedAccelerated_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitSignedAccelerated_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitMixedSignednessAccelerated_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitUnsignedAccelerated_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitSignedAccelerated_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitUnsignedAccelerated_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitSignedAccelerated_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitUnsignedAccelerated_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitSignedAccelerated_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitUnsignedAccelerated_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitSignedAccelerated_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated_ = {},
|
|
VULKAN_HPP_NAMESPACE::DeviceSize storageTexelBufferOffsetAlignmentBytes_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 storageTexelBufferOffsetSingleTexelAlignment_ = {},
|
|
VULKAN_HPP_NAMESPACE::DeviceSize uniformTexelBufferOffsetAlignmentBytes_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 uniformTexelBufferOffsetSingleTexelAlignment_ = {},
|
|
VULKAN_HPP_NAMESPACE::DeviceSize maxBufferSize_ = {},
|
|
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, minSubgroupSize( minSubgroupSize_ )
|
|
, maxSubgroupSize( maxSubgroupSize_ )
|
|
, maxComputeWorkgroupSubgroups( maxComputeWorkgroupSubgroups_ )
|
|
, requiredSubgroupSizeStages( requiredSubgroupSizeStages_ )
|
|
, maxInlineUniformBlockSize( maxInlineUniformBlockSize_ )
|
|
, maxPerStageDescriptorInlineUniformBlocks( maxPerStageDescriptorInlineUniformBlocks_ )
|
|
, maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks( maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks_ )
|
|
, maxDescriptorSetInlineUniformBlocks( maxDescriptorSetInlineUniformBlocks_ )
|
|
, maxDescriptorSetUpdateAfterBindInlineUniformBlocks( maxDescriptorSetUpdateAfterBindInlineUniformBlocks_ )
|
|
, maxInlineUniformTotalSize( maxInlineUniformTotalSize_ )
|
|
, integerDotProduct8BitUnsignedAccelerated( integerDotProduct8BitUnsignedAccelerated_ )
|
|
, integerDotProduct8BitSignedAccelerated( integerDotProduct8BitSignedAccelerated_ )
|
|
, integerDotProduct8BitMixedSignednessAccelerated( integerDotProduct8BitMixedSignednessAccelerated_ )
|
|
, integerDotProduct4x8BitPackedUnsignedAccelerated( integerDotProduct4x8BitPackedUnsignedAccelerated_ )
|
|
, integerDotProduct4x8BitPackedSignedAccelerated( integerDotProduct4x8BitPackedSignedAccelerated_ )
|
|
, integerDotProduct4x8BitPackedMixedSignednessAccelerated( integerDotProduct4x8BitPackedMixedSignednessAccelerated_ )
|
|
, integerDotProduct16BitUnsignedAccelerated( integerDotProduct16BitUnsignedAccelerated_ )
|
|
, integerDotProduct16BitSignedAccelerated( integerDotProduct16BitSignedAccelerated_ )
|
|
, integerDotProduct16BitMixedSignednessAccelerated( integerDotProduct16BitMixedSignednessAccelerated_ )
|
|
, integerDotProduct32BitUnsignedAccelerated( integerDotProduct32BitUnsignedAccelerated_ )
|
|
, integerDotProduct32BitSignedAccelerated( integerDotProduct32BitSignedAccelerated_ )
|
|
, integerDotProduct32BitMixedSignednessAccelerated( integerDotProduct32BitMixedSignednessAccelerated_ )
|
|
, integerDotProduct64BitUnsignedAccelerated( integerDotProduct64BitUnsignedAccelerated_ )
|
|
, integerDotProduct64BitSignedAccelerated( integerDotProduct64BitSignedAccelerated_ )
|
|
, integerDotProduct64BitMixedSignednessAccelerated( integerDotProduct64BitMixedSignednessAccelerated_ )
|
|
, integerDotProductAccumulatingSaturating8BitUnsignedAccelerated( integerDotProductAccumulatingSaturating8BitUnsignedAccelerated_ )
|
|
, integerDotProductAccumulatingSaturating8BitSignedAccelerated( integerDotProductAccumulatingSaturating8BitSignedAccelerated_ )
|
|
, integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated( integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated_ )
|
|
, integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated( integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated_ )
|
|
, integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated( integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated_ )
|
|
, integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated(
|
|
integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated_ )
|
|
, integerDotProductAccumulatingSaturating16BitUnsignedAccelerated( integerDotProductAccumulatingSaturating16BitUnsignedAccelerated_ )
|
|
, integerDotProductAccumulatingSaturating16BitSignedAccelerated( integerDotProductAccumulatingSaturating16BitSignedAccelerated_ )
|
|
, integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated( integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated_ )
|
|
, integerDotProductAccumulatingSaturating32BitUnsignedAccelerated( integerDotProductAccumulatingSaturating32BitUnsignedAccelerated_ )
|
|
, integerDotProductAccumulatingSaturating32BitSignedAccelerated( integerDotProductAccumulatingSaturating32BitSignedAccelerated_ )
|
|
, integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated( integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated_ )
|
|
, integerDotProductAccumulatingSaturating64BitUnsignedAccelerated( integerDotProductAccumulatingSaturating64BitUnsignedAccelerated_ )
|
|
, integerDotProductAccumulatingSaturating64BitSignedAccelerated( integerDotProductAccumulatingSaturating64BitSignedAccelerated_ )
|
|
, integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated( integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated_ )
|
|
, storageTexelBufferOffsetAlignmentBytes( storageTexelBufferOffsetAlignmentBytes_ )
|
|
, storageTexelBufferOffsetSingleTexelAlignment( storageTexelBufferOffsetSingleTexelAlignment_ )
|
|
, uniformTexelBufferOffsetAlignmentBytes( uniformTexelBufferOffsetAlignmentBytes_ )
|
|
, uniformTexelBufferOffsetSingleTexelAlignment( uniformTexelBufferOffsetSingleTexelAlignment_ )
|
|
, maxBufferSize( maxBufferSize_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan13Properties( PhysicalDeviceVulkan13Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceVulkan13Properties( VkPhysicalDeviceVulkan13Properties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceVulkan13Properties( *reinterpret_cast<PhysicalDeviceVulkan13Properties const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
PhysicalDeviceVulkan13Properties & operator=( PhysicalDeviceVulkan13Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceVulkan13Properties & operator=( VkPhysicalDeviceVulkan13Properties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan13Properties const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
operator VkPhysicalDeviceVulkan13Properties const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceVulkan13Properties *>( this );
|
|
}
|
|
|
|
operator VkPhysicalDeviceVulkan13Properties &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceVulkan13Properties *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
void * const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
VULKAN_HPP_NAMESPACE::ShaderStageFlags const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::DeviceSize const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::DeviceSize const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::DeviceSize const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType,
|
|
pNext,
|
|
minSubgroupSize,
|
|
maxSubgroupSize,
|
|
maxComputeWorkgroupSubgroups,
|
|
requiredSubgroupSizeStages,
|
|
maxInlineUniformBlockSize,
|
|
maxPerStageDescriptorInlineUniformBlocks,
|
|
maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks,
|
|
maxDescriptorSetInlineUniformBlocks,
|
|
maxDescriptorSetUpdateAfterBindInlineUniformBlocks,
|
|
maxInlineUniformTotalSize,
|
|
integerDotProduct8BitUnsignedAccelerated,
|
|
integerDotProduct8BitSignedAccelerated,
|
|
integerDotProduct8BitMixedSignednessAccelerated,
|
|
integerDotProduct4x8BitPackedUnsignedAccelerated,
|
|
integerDotProduct4x8BitPackedSignedAccelerated,
|
|
integerDotProduct4x8BitPackedMixedSignednessAccelerated,
|
|
integerDotProduct16BitUnsignedAccelerated,
|
|
integerDotProduct16BitSignedAccelerated,
|
|
integerDotProduct16BitMixedSignednessAccelerated,
|
|
integerDotProduct32BitUnsignedAccelerated,
|
|
integerDotProduct32BitSignedAccelerated,
|
|
integerDotProduct32BitMixedSignednessAccelerated,
|
|
integerDotProduct64BitUnsignedAccelerated,
|
|
integerDotProduct64BitSignedAccelerated,
|
|
integerDotProduct64BitMixedSignednessAccelerated,
|
|
integerDotProductAccumulatingSaturating8BitUnsignedAccelerated,
|
|
integerDotProductAccumulatingSaturating8BitSignedAccelerated,
|
|
integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated,
|
|
integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated,
|
|
integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated,
|
|
integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated,
|
|
integerDotProductAccumulatingSaturating16BitUnsignedAccelerated,
|
|
integerDotProductAccumulatingSaturating16BitSignedAccelerated,
|
|
integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated,
|
|
integerDotProductAccumulatingSaturating32BitUnsignedAccelerated,
|
|
integerDotProductAccumulatingSaturating32BitSignedAccelerated,
|
|
integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated,
|
|
integerDotProductAccumulatingSaturating64BitUnsignedAccelerated,
|
|
integerDotProductAccumulatingSaturating64BitSignedAccelerated,
|
|
integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated,
|
|
storageTexelBufferOffsetAlignmentBytes,
|
|
storageTexelBufferOffsetSingleTexelAlignment,
|
|
uniformTexelBufferOffsetAlignmentBytes,
|
|
uniformTexelBufferOffsetSingleTexelAlignment,
|
|
maxBufferSize );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( PhysicalDeviceVulkan13Properties const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceVulkan13Properties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minSubgroupSize == rhs.minSubgroupSize ) && ( maxSubgroupSize == rhs.maxSubgroupSize ) &&
|
|
( maxComputeWorkgroupSubgroups == rhs.maxComputeWorkgroupSubgroups ) && ( requiredSubgroupSizeStages == rhs.requiredSubgroupSizeStages ) &&
|
|
( maxInlineUniformBlockSize == rhs.maxInlineUniformBlockSize ) &&
|
|
( maxPerStageDescriptorInlineUniformBlocks == rhs.maxPerStageDescriptorInlineUniformBlocks ) &&
|
|
( maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks == rhs.maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks ) &&
|
|
( maxDescriptorSetInlineUniformBlocks == rhs.maxDescriptorSetInlineUniformBlocks ) &&
|
|
( maxDescriptorSetUpdateAfterBindInlineUniformBlocks == rhs.maxDescriptorSetUpdateAfterBindInlineUniformBlocks ) &&
|
|
( maxInlineUniformTotalSize == rhs.maxInlineUniformTotalSize ) &&
|
|
( integerDotProduct8BitUnsignedAccelerated == rhs.integerDotProduct8BitUnsignedAccelerated ) &&
|
|
( integerDotProduct8BitSignedAccelerated == rhs.integerDotProduct8BitSignedAccelerated ) &&
|
|
( integerDotProduct8BitMixedSignednessAccelerated == rhs.integerDotProduct8BitMixedSignednessAccelerated ) &&
|
|
( integerDotProduct4x8BitPackedUnsignedAccelerated == rhs.integerDotProduct4x8BitPackedUnsignedAccelerated ) &&
|
|
( integerDotProduct4x8BitPackedSignedAccelerated == rhs.integerDotProduct4x8BitPackedSignedAccelerated ) &&
|
|
( integerDotProduct4x8BitPackedMixedSignednessAccelerated == rhs.integerDotProduct4x8BitPackedMixedSignednessAccelerated ) &&
|
|
( integerDotProduct16BitUnsignedAccelerated == rhs.integerDotProduct16BitUnsignedAccelerated ) &&
|
|
( integerDotProduct16BitSignedAccelerated == rhs.integerDotProduct16BitSignedAccelerated ) &&
|
|
( integerDotProduct16BitMixedSignednessAccelerated == rhs.integerDotProduct16BitMixedSignednessAccelerated ) &&
|
|
( integerDotProduct32BitUnsignedAccelerated == rhs.integerDotProduct32BitUnsignedAccelerated ) &&
|
|
( integerDotProduct32BitSignedAccelerated == rhs.integerDotProduct32BitSignedAccelerated ) &&
|
|
( integerDotProduct32BitMixedSignednessAccelerated == rhs.integerDotProduct32BitMixedSignednessAccelerated ) &&
|
|
( integerDotProduct64BitUnsignedAccelerated == rhs.integerDotProduct64BitUnsignedAccelerated ) &&
|
|
( integerDotProduct64BitSignedAccelerated == rhs.integerDotProduct64BitSignedAccelerated ) &&
|
|
( integerDotProduct64BitMixedSignednessAccelerated == rhs.integerDotProduct64BitMixedSignednessAccelerated ) &&
|
|
( integerDotProductAccumulatingSaturating8BitUnsignedAccelerated == rhs.integerDotProductAccumulatingSaturating8BitUnsignedAccelerated ) &&
|
|
( integerDotProductAccumulatingSaturating8BitSignedAccelerated == rhs.integerDotProductAccumulatingSaturating8BitSignedAccelerated ) &&
|
|
( integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated ==
|
|
rhs.integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated ) &&
|
|
( integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated ==
|
|
rhs.integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated ) &&
|
|
( integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated ==
|
|
rhs.integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated ) &&
|
|
( integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated ==
|
|
rhs.integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated ) &&
|
|
( integerDotProductAccumulatingSaturating16BitUnsignedAccelerated == rhs.integerDotProductAccumulatingSaturating16BitUnsignedAccelerated ) &&
|
|
( integerDotProductAccumulatingSaturating16BitSignedAccelerated == rhs.integerDotProductAccumulatingSaturating16BitSignedAccelerated ) &&
|
|
( integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated ==
|
|
rhs.integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated ) &&
|
|
( integerDotProductAccumulatingSaturating32BitUnsignedAccelerated == rhs.integerDotProductAccumulatingSaturating32BitUnsignedAccelerated ) &&
|
|
( integerDotProductAccumulatingSaturating32BitSignedAccelerated == rhs.integerDotProductAccumulatingSaturating32BitSignedAccelerated ) &&
|
|
( integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated ==
|
|
rhs.integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated ) &&
|
|
( integerDotProductAccumulatingSaturating64BitUnsignedAccelerated == rhs.integerDotProductAccumulatingSaturating64BitUnsignedAccelerated ) &&
|
|
( integerDotProductAccumulatingSaturating64BitSignedAccelerated == rhs.integerDotProductAccumulatingSaturating64BitSignedAccelerated ) &&
|
|
( integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated ==
|
|
rhs.integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated ) &&
|
|
( storageTexelBufferOffsetAlignmentBytes == rhs.storageTexelBufferOffsetAlignmentBytes ) &&
|
|
( storageTexelBufferOffsetSingleTexelAlignment == rhs.storageTexelBufferOffsetSingleTexelAlignment ) &&
|
|
( uniformTexelBufferOffsetAlignmentBytes == rhs.uniformTexelBufferOffsetAlignmentBytes ) &&
|
|
( uniformTexelBufferOffsetSingleTexelAlignment == rhs.uniformTexelBufferOffsetSingleTexelAlignment ) && ( maxBufferSize == rhs.maxBufferSize );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceVulkan13Properties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan13Properties;
|
|
void * pNext = {};
|
|
uint32_t minSubgroupSize = {};
|
|
uint32_t maxSubgroupSize = {};
|
|
uint32_t maxComputeWorkgroupSubgroups = {};
|
|
VULKAN_HPP_NAMESPACE::ShaderStageFlags requiredSubgroupSizeStages = {};
|
|
uint32_t maxInlineUniformBlockSize = {};
|
|
uint32_t maxPerStageDescriptorInlineUniformBlocks = {};
|
|
uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks = {};
|
|
uint32_t maxDescriptorSetInlineUniformBlocks = {};
|
|
uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks = {};
|
|
uint32_t maxInlineUniformTotalSize = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitUnsignedAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitSignedAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitMixedSignednessAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedUnsignedAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedSignedAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedMixedSignednessAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitUnsignedAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitSignedAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitMixedSignednessAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitUnsignedAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitSignedAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitMixedSignednessAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitUnsignedAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitSignedAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitMixedSignednessAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitUnsignedAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitSignedAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitUnsignedAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitSignedAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitUnsignedAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitSignedAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitUnsignedAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitSignedAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize storageTexelBufferOffsetAlignmentBytes = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 storageTexelBufferOffsetSingleTexelAlignment = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize uniformTexelBufferOffsetAlignmentBytes = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 uniformTexelBufferOffsetSingleTexelAlignment = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize maxBufferSize = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceVulkan13Properties>
|
|
{
|
|
using Type = PhysicalDeviceVulkan13Properties;
|
|
};
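  // Note: illustrative usage sketch, not part of the generated registry output.
  // Like the other PhysicalDeviceVulkan1*Properties structs, this one is filled in by the
  // implementation when chained into a PhysicalDeviceProperties2 query. Assumes a valid
  // vk::PhysicalDevice `physicalDevice` (placeholder name).
  //
  //   vk::PhysicalDeviceProperties2        props2;
  //   vk::PhysicalDeviceVulkan13Properties vulkan13Props;
  //   props2.pNext = &vulkan13Props;
  //   physicalDevice.getProperties2( &props2 );
  //   // e.g. vulkan13Props.minSubgroupSize, maxSubgroupSize and maxBufferSize are now valid.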
|
|
|
|
  struct PhysicalDeviceVulkanMemoryModelFeatures
  {
    using NativeType = VkPhysicalDeviceVulkanMemoryModelFeatures;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkanMemoryModelFeatures;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkanMemoryModelFeatures( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_ = {},
                                                                  VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_ = {},
                                                                  VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ = {},
                                                                  void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , vulkanMemoryModel( vulkanMemoryModel_ )
      , vulkanMemoryModelDeviceScope( vulkanMemoryModelDeviceScope_ )
      , vulkanMemoryModelAvailabilityVisibilityChains( vulkanMemoryModelAvailabilityVisibilityChains_ )
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkanMemoryModelFeatures( PhysicalDeviceVulkanMemoryModelFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceVulkanMemoryModelFeatures( VkPhysicalDeviceVulkanMemoryModelFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceVulkanMemoryModelFeatures( *reinterpret_cast<PhysicalDeviceVulkanMemoryModelFeatures const *>( &rhs ) )
    {
    }

    PhysicalDeviceVulkanMemoryModelFeatures & operator=( PhysicalDeviceVulkanMemoryModelFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceVulkanMemoryModelFeatures & operator=( VkPhysicalDeviceVulkanMemoryModelFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkanMemoryModelFeatures const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkanMemoryModelFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkanMemoryModelFeatures &
      setVulkanMemoryModel( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_ ) VULKAN_HPP_NOEXCEPT
    {
      vulkanMemoryModel = vulkanMemoryModel_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkanMemoryModelFeatures &
      setVulkanMemoryModelDeviceScope( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_ ) VULKAN_HPP_NOEXCEPT
    {
      vulkanMemoryModelDeviceScope = vulkanMemoryModelDeviceScope_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkanMemoryModelFeatures &
      setVulkanMemoryModelAvailabilityVisibilityChains( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ ) VULKAN_HPP_NOEXCEPT
    {
      vulkanMemoryModelAvailabilityVisibilityChains = vulkanMemoryModelAvailabilityVisibilityChains_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkPhysicalDeviceVulkanMemoryModelFeatures const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceVulkanMemoryModelFeatures *>( this );
    }

    operator VkPhysicalDeviceVulkanMemoryModelFeatures &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceVulkanMemoryModelFeatures *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#  if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#  else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
               void * const &,
               VULKAN_HPP_NAMESPACE::Bool32 const &,
               VULKAN_HPP_NAMESPACE::Bool32 const &,
               VULKAN_HPP_NAMESPACE::Bool32 const &>
#  endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, vulkanMemoryModel, vulkanMemoryModelDeviceScope, vulkanMemoryModelAvailabilityVisibilityChains );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceVulkanMemoryModelFeatures const & ) const = default;
#else
    bool operator==( PhysicalDeviceVulkanMemoryModelFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( vulkanMemoryModel == rhs.vulkanMemoryModel ) &&
             ( vulkanMemoryModelDeviceScope == rhs.vulkanMemoryModelDeviceScope ) &&
             ( vulkanMemoryModelAvailabilityVisibilityChains == rhs.vulkanMemoryModelAvailabilityVisibilityChains );
#  endif
    }

    bool operator!=( PhysicalDeviceVulkanMemoryModelFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkanMemoryModelFeatures;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel = {};
    VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope = {};
    VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains = {};
  };

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceVulkanMemoryModelFeatures>
  {
    using Type = PhysicalDeviceVulkanMemoryModelFeatures;
  };

  using PhysicalDeviceVulkanMemoryModelFeaturesKHR = PhysicalDeviceVulkanMemoryModelFeatures;

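  // Note: illustrative usage sketch, not part of the generated registry output. The same
  // query/enable pattern as for the other feature structs applies; assumes a valid
  // vk::PhysicalDevice `physicalDevice` (placeholder name).
  //
  //   vk::PhysicalDeviceFeatures2                 features2;
  //   vk::PhysicalDeviceVulkanMemoryModelFeatures memoryModelFeatures;
  //   features2.pNext = &memoryModelFeatures;
  //   physicalDevice.getFeatures2( &features2 );
  //   // memoryModelFeatures.vulkanMemoryModel == VK_TRUE means the SPIR-V Vulkan memory model is supported.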
  struct PhysicalDeviceVulkanSC10Features
  {
    using NativeType = VkPhysicalDeviceVulkanSC10Features;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkanSc10Features;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkanSC10Features( VULKAN_HPP_NAMESPACE::Bool32 shaderAtomicInstructions_ = {},
                                                           void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , shaderAtomicInstructions( shaderAtomicInstructions_ )
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkanSC10Features( PhysicalDeviceVulkanSC10Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceVulkanSC10Features( VkPhysicalDeviceVulkanSC10Features const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceVulkanSC10Features( *reinterpret_cast<PhysicalDeviceVulkanSC10Features const *>( &rhs ) )
    {
    }

    PhysicalDeviceVulkanSC10Features & operator=( PhysicalDeviceVulkanSC10Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceVulkanSC10Features & operator=( VkPhysicalDeviceVulkanSC10Features const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkanSC10Features const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkanSC10Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkanSC10Features &
      setShaderAtomicInstructions( VULKAN_HPP_NAMESPACE::Bool32 shaderAtomicInstructions_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderAtomicInstructions = shaderAtomicInstructions_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkPhysicalDeviceVulkanSC10Features const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceVulkanSC10Features *>( this );
    }

    operator VkPhysicalDeviceVulkanSC10Features &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceVulkanSC10Features *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#  if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#  else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#  endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, shaderAtomicInstructions );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceVulkanSC10Features const & ) const = default;
#else
    bool operator==( PhysicalDeviceVulkanSC10Features const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderAtomicInstructions == rhs.shaderAtomicInstructions );
#  endif
    }

    bool operator!=( PhysicalDeviceVulkanSC10Features const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkanSc10Features;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderAtomicInstructions = {};
  };

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceVulkanSc10Features>
  {
    using Type = PhysicalDeviceVulkanSC10Features;
  };

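  // Note: illustrative usage sketch, not part of the generated registry output. On a Vulkan SC
  // implementation this struct follows the same getFeatures2 chaining pattern; assumes a valid
  // vk::PhysicalDevice `physicalDevice` (placeholder name).
  //
  //   vk::PhysicalDeviceFeatures2          features2;
  //   vk::PhysicalDeviceVulkanSC10Features sc10Features;
  //   features2.pNext = &sc10Features;
  //   physicalDevice.getFeatures2( &features2 );
  //   // sc10Features.shaderAtomicInstructions reports SC 1.0 atomic instruction support.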
  struct PhysicalDeviceVulkanSC10Properties
  {
    using NativeType = VkPhysicalDeviceVulkanSC10Properties;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkanSc10Properties;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkanSC10Properties( VULKAN_HPP_NAMESPACE::Bool32 deviceNoDynamicHostAllocations_ = {},
                                                             VULKAN_HPP_NAMESPACE::Bool32 deviceDestroyFreesMemory_ = {},
                                                             VULKAN_HPP_NAMESPACE::Bool32 commandPoolMultipleCommandBuffersRecording_ = {},
                                                             VULKAN_HPP_NAMESPACE::Bool32 commandPoolResetCommandBuffer_ = {},
                                                             VULKAN_HPP_NAMESPACE::Bool32 commandBufferSimultaneousUse_ = {},
                                                             VULKAN_HPP_NAMESPACE::Bool32 secondaryCommandBufferNullOrImagelessFramebuffer_ = {},
                                                             VULKAN_HPP_NAMESPACE::Bool32 recycleDescriptorSetMemory_ = {},
                                                             VULKAN_HPP_NAMESPACE::Bool32 recyclePipelineMemory_ = {},
                                                             uint32_t maxRenderPassSubpasses_ = {},
                                                             uint32_t maxRenderPassDependencies_ = {},
                                                             uint32_t maxSubpassInputAttachments_ = {},
                                                             uint32_t maxSubpassPreserveAttachments_ = {},
                                                             uint32_t maxFramebufferAttachments_ = {},
                                                             uint32_t maxDescriptorSetLayoutBindings_ = {},
                                                             uint32_t maxQueryFaultCount_ = {},
                                                             uint32_t maxCallbackFaultCount_ = {},
                                                             uint32_t maxCommandPoolCommandBuffers_ = {},
                                                             VULKAN_HPP_NAMESPACE::DeviceSize maxCommandBufferSize_ = {},
                                                             void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , deviceNoDynamicHostAllocations( deviceNoDynamicHostAllocations_ )
      , deviceDestroyFreesMemory( deviceDestroyFreesMemory_ )
      , commandPoolMultipleCommandBuffersRecording( commandPoolMultipleCommandBuffersRecording_ )
      , commandPoolResetCommandBuffer( commandPoolResetCommandBuffer_ )
      , commandBufferSimultaneousUse( commandBufferSimultaneousUse_ )
      , secondaryCommandBufferNullOrImagelessFramebuffer( secondaryCommandBufferNullOrImagelessFramebuffer_ )
      , recycleDescriptorSetMemory( recycleDescriptorSetMemory_ )
      , recyclePipelineMemory( recyclePipelineMemory_ )
      , maxRenderPassSubpasses( maxRenderPassSubpasses_ )
      , maxRenderPassDependencies( maxRenderPassDependencies_ )
      , maxSubpassInputAttachments( maxSubpassInputAttachments_ )
      , maxSubpassPreserveAttachments( maxSubpassPreserveAttachments_ )
      , maxFramebufferAttachments( maxFramebufferAttachments_ )
      , maxDescriptorSetLayoutBindings( maxDescriptorSetLayoutBindings_ )
      , maxQueryFaultCount( maxQueryFaultCount_ )
      , maxCallbackFaultCount( maxCallbackFaultCount_ )
      , maxCommandPoolCommandBuffers( maxCommandPoolCommandBuffers_ )
      , maxCommandBufferSize( maxCommandBufferSize_ )
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkanSC10Properties( PhysicalDeviceVulkanSC10Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceVulkanSC10Properties( VkPhysicalDeviceVulkanSC10Properties const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceVulkanSC10Properties( *reinterpret_cast<PhysicalDeviceVulkanSC10Properties const *>( &rhs ) )
    {
    }

    PhysicalDeviceVulkanSC10Properties & operator=( PhysicalDeviceVulkanSC10Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceVulkanSC10Properties & operator=( VkPhysicalDeviceVulkanSC10Properties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkanSC10Properties const *>( &rhs );
      return *this;
    }

    operator VkPhysicalDeviceVulkanSC10Properties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceVulkanSC10Properties *>( this );
    }

    operator VkPhysicalDeviceVulkanSC10Properties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceVulkanSC10Properties *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#  if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#  else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
               void * const &,
               VULKAN_HPP_NAMESPACE::Bool32 const &,
               VULKAN_HPP_NAMESPACE::Bool32 const &,
               VULKAN_HPP_NAMESPACE::Bool32 const &,
               VULKAN_HPP_NAMESPACE::Bool32 const &,
               VULKAN_HPP_NAMESPACE::Bool32 const &,
               VULKAN_HPP_NAMESPACE::Bool32 const &,
               VULKAN_HPP_NAMESPACE::Bool32 const &,
               VULKAN_HPP_NAMESPACE::Bool32 const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               VULKAN_HPP_NAMESPACE::DeviceSize const &>
#  endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       deviceNoDynamicHostAllocations,
                       deviceDestroyFreesMemory,
                       commandPoolMultipleCommandBuffersRecording,
                       commandPoolResetCommandBuffer,
                       commandBufferSimultaneousUse,
                       secondaryCommandBufferNullOrImagelessFramebuffer,
                       recycleDescriptorSetMemory,
                       recyclePipelineMemory,
                       maxRenderPassSubpasses,
                       maxRenderPassDependencies,
                       maxSubpassInputAttachments,
                       maxSubpassPreserveAttachments,
                       maxFramebufferAttachments,
                       maxDescriptorSetLayoutBindings,
                       maxQueryFaultCount,
                       maxCallbackFaultCount,
                       maxCommandPoolCommandBuffers,
                       maxCommandBufferSize );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceVulkanSC10Properties const & ) const = default;
#else
    bool operator==( PhysicalDeviceVulkanSC10Properties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceNoDynamicHostAllocations == rhs.deviceNoDynamicHostAllocations ) &&
             ( deviceDestroyFreesMemory == rhs.deviceDestroyFreesMemory ) &&
             ( commandPoolMultipleCommandBuffersRecording == rhs.commandPoolMultipleCommandBuffersRecording ) &&
             ( commandPoolResetCommandBuffer == rhs.commandPoolResetCommandBuffer ) && ( commandBufferSimultaneousUse == rhs.commandBufferSimultaneousUse ) &&
             ( secondaryCommandBufferNullOrImagelessFramebuffer == rhs.secondaryCommandBufferNullOrImagelessFramebuffer ) &&
             ( recycleDescriptorSetMemory == rhs.recycleDescriptorSetMemory ) && ( recyclePipelineMemory == rhs.recyclePipelineMemory ) &&
             ( maxRenderPassSubpasses == rhs.maxRenderPassSubpasses ) && ( maxRenderPassDependencies == rhs.maxRenderPassDependencies ) &&
             ( maxSubpassInputAttachments == rhs.maxSubpassInputAttachments ) && ( maxSubpassPreserveAttachments == rhs.maxSubpassPreserveAttachments ) &&
             ( maxFramebufferAttachments == rhs.maxFramebufferAttachments ) && ( maxDescriptorSetLayoutBindings == rhs.maxDescriptorSetLayoutBindings ) &&
             ( maxQueryFaultCount == rhs.maxQueryFaultCount ) && ( maxCallbackFaultCount == rhs.maxCallbackFaultCount ) &&
             ( maxCommandPoolCommandBuffers == rhs.maxCommandPoolCommandBuffers ) && ( maxCommandBufferSize == rhs.maxCommandBufferSize );
#  endif
    }

    bool operator!=( PhysicalDeviceVulkanSC10Properties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkanSc10Properties;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 deviceNoDynamicHostAllocations = {};
    VULKAN_HPP_NAMESPACE::Bool32 deviceDestroyFreesMemory = {};
    VULKAN_HPP_NAMESPACE::Bool32 commandPoolMultipleCommandBuffersRecording = {};
    VULKAN_HPP_NAMESPACE::Bool32 commandPoolResetCommandBuffer = {};
    VULKAN_HPP_NAMESPACE::Bool32 commandBufferSimultaneousUse = {};
    VULKAN_HPP_NAMESPACE::Bool32 secondaryCommandBufferNullOrImagelessFramebuffer = {};
    VULKAN_HPP_NAMESPACE::Bool32 recycleDescriptorSetMemory = {};
    VULKAN_HPP_NAMESPACE::Bool32 recyclePipelineMemory = {};
    uint32_t maxRenderPassSubpasses = {};
    uint32_t maxRenderPassDependencies = {};
    uint32_t maxSubpassInputAttachments = {};
    uint32_t maxSubpassPreserveAttachments = {};
    uint32_t maxFramebufferAttachments = {};
    uint32_t maxDescriptorSetLayoutBindings = {};
    uint32_t maxQueryFaultCount = {};
    uint32_t maxCallbackFaultCount = {};
    uint32_t maxCommandPoolCommandBuffers = {};
    VULKAN_HPP_NAMESPACE::DeviceSize maxCommandBufferSize = {};
  };

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceVulkanSc10Properties>
  {
    using Type = PhysicalDeviceVulkanSC10Properties;
  };

  struct PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT
  {
    using NativeType = VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 ycbcr2plane444Formats_ = {},
                                                                         void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , ycbcr2plane444Formats( ycbcr2plane444Formats_ )
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT( PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT( VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT( *reinterpret_cast<PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT & operator=( PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT & operator=( VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT &
      setYcbcr2plane444Formats( VULKAN_HPP_NAMESPACE::Bool32 ycbcr2plane444Formats_ ) VULKAN_HPP_NOEXCEPT
    {
      ycbcr2plane444Formats = ycbcr2plane444Formats_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#  if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#  else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#  endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, ycbcr2plane444Formats );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( ycbcr2plane444Formats == rhs.ycbcr2plane444Formats );
#  endif
    }

    bool operator!=( PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 ycbcr2plane444Formats = {};
  };

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT>
  {
    using Type = PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT;
  };

  struct PhysicalDeviceYcbcrImageArraysFeaturesEXT
  {
    using NativeType = VkPhysicalDeviceYcbcrImageArraysFeaturesEXT;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceYcbcrImageArraysFeaturesEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceYcbcrImageArraysFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 ycbcrImageArrays_ = {},
                                                                    void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , ycbcrImageArrays( ycbcrImageArrays_ )
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceYcbcrImageArraysFeaturesEXT( PhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceYcbcrImageArraysFeaturesEXT( VkPhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceYcbcrImageArraysFeaturesEXT( *reinterpret_cast<PhysicalDeviceYcbcrImageArraysFeaturesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceYcbcrImageArraysFeaturesEXT & operator=( PhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceYcbcrImageArraysFeaturesEXT & operator=( VkPhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcrImageArraysFeaturesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceYcbcrImageArraysFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceYcbcrImageArraysFeaturesEXT &
      setYcbcrImageArrays( VULKAN_HPP_NAMESPACE::Bool32 ycbcrImageArrays_ ) VULKAN_HPP_NOEXCEPT
    {
      ycbcrImageArrays = ycbcrImageArrays_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkPhysicalDeviceYcbcrImageArraysFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceYcbcrImageArraysFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceYcbcrImageArraysFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceYcbcrImageArraysFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#  if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#  else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#  endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, ycbcrImageArrays );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceYcbcrImageArraysFeaturesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( ycbcrImageArrays == rhs.ycbcrImageArrays );
#  endif
    }

    bool operator!=( PhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceYcbcrImageArraysFeaturesEXT;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 ycbcrImageArrays = {};
  };

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceYcbcrImageArraysFeaturesEXT>
  {
    using Type = PhysicalDeviceYcbcrImageArraysFeaturesEXT;
  };

  struct PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures
  {
    using NativeType = VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceZeroInitializeWorkgroupMemoryFeatures;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures( VULKAN_HPP_NAMESPACE::Bool32 shaderZeroInitializeWorkgroupMemory_ = {},
                                                                              void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , shaderZeroInitializeWorkgroupMemory( shaderZeroInitializeWorkgroupMemory_ )
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures( PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures( VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures( *reinterpret_cast<PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const *>( &rhs ) )
    {
    }

    PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures &
      operator=( PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures & operator=( VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures &
      setShaderZeroInitializeWorkgroupMemory( VULKAN_HPP_NAMESPACE::Bool32 shaderZeroInitializeWorkgroupMemory_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderZeroInitializeWorkgroupMemory = shaderZeroInitializeWorkgroupMemory_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures *>( this );
    }

    operator VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#  if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#  else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#  endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, shaderZeroInitializeWorkgroupMemory );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const & ) const = default;
#else
    bool operator==( PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderZeroInitializeWorkgroupMemory == rhs.shaderZeroInitializeWorkgroupMemory );
#  endif
    }

    bool operator!=( PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceZeroInitializeWorkgroupMemoryFeatures;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderZeroInitializeWorkgroupMemory = {};
  };

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceZeroInitializeWorkgroupMemoryFeatures>
  {
    using Type = PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures;
  };

  using PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR = PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures;

  struct PipelineCacheHeaderVersionOne
  {
    using NativeType = VkPipelineCacheHeaderVersionOne;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14
      PipelineCacheHeaderVersionOne( uint32_t headerSize_ = {},
                                     VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersion headerVersion_ = VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersion::eOne,
                                     uint32_t vendorID_ = {},
                                     uint32_t deviceID_ = {},
                                     std::array<uint8_t, VK_UUID_SIZE> const & pipelineCacheUUID_ = {} ) VULKAN_HPP_NOEXCEPT
      : headerSize( headerSize_ )
      , headerVersion( headerVersion_ )
      , vendorID( vendorID_ )
      , deviceID( deviceID_ )
      , pipelineCacheUUID( pipelineCacheUUID_ )
    {
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineCacheHeaderVersionOne( PipelineCacheHeaderVersionOne const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineCacheHeaderVersionOne( VkPipelineCacheHeaderVersionOne const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineCacheHeaderVersionOne( *reinterpret_cast<PipelineCacheHeaderVersionOne const *>( &rhs ) )
    {
    }

    PipelineCacheHeaderVersionOne & operator=( PipelineCacheHeaderVersionOne const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PipelineCacheHeaderVersionOne & operator=( VkPipelineCacheHeaderVersionOne const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersionOne const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PipelineCacheHeaderVersionOne & setHeaderSize( uint32_t headerSize_ ) VULKAN_HPP_NOEXCEPT
    {
      headerSize = headerSize_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineCacheHeaderVersionOne &
      setHeaderVersion( VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersion headerVersion_ ) VULKAN_HPP_NOEXCEPT
    {
      headerVersion = headerVersion_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineCacheHeaderVersionOne & setVendorID( uint32_t vendorID_ ) VULKAN_HPP_NOEXCEPT
    {
      vendorID = vendorID_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineCacheHeaderVersionOne & setDeviceID( uint32_t deviceID_ ) VULKAN_HPP_NOEXCEPT
    {
      deviceID = deviceID_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineCacheHeaderVersionOne & setPipelineCacheUUID( std::array<uint8_t, VK_UUID_SIZE> pipelineCacheUUID_ ) VULKAN_HPP_NOEXCEPT
    {
      pipelineCacheUUID = pipelineCacheUUID_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkPipelineCacheHeaderVersionOne const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineCacheHeaderVersionOne *>( this );
    }

    operator VkPipelineCacheHeaderVersionOne &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineCacheHeaderVersionOne *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#  if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#  else
    std::tuple<uint32_t const &,
               VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersion const &,
               uint32_t const &,
               uint32_t const &,
               VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> const &>
#  endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( headerSize, headerVersion, vendorID, deviceID, pipelineCacheUUID );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineCacheHeaderVersionOne const & ) const = default;
#else
    bool operator==( PipelineCacheHeaderVersionOne const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( headerSize == rhs.headerSize ) && ( headerVersion == rhs.headerVersion ) && ( vendorID == rhs.vendorID ) && ( deviceID == rhs.deviceID ) &&
             ( pipelineCacheUUID == rhs.pipelineCacheUUID );
#  endif
    }

    bool operator!=( PipelineCacheHeaderVersionOne const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    uint32_t headerSize = {};
    VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersion headerVersion = VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersion::eOne;
    uint32_t vendorID = {};
    uint32_t deviceID = {};
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> pipelineCacheUUID = {};
  };

  struct PipelineCacheHeaderVersionSafetyCriticalOne
  {
    using NativeType = VkPipelineCacheHeaderVersionSafetyCriticalOne;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 PipelineCacheHeaderVersionSafetyCriticalOne(
      VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersionOne headerVersionOne_ = {},
      VULKAN_HPP_NAMESPACE::PipelineCacheValidationVersion validationVersion_ = VULKAN_HPP_NAMESPACE::PipelineCacheValidationVersion::eSafetyCriticalOne,
      uint32_t implementationData_ = {},
      uint32_t pipelineIndexCount_ = {},
      uint32_t pipelineIndexStride_ = {},
      uint64_t pipelineIndexOffset_ = {} ) VULKAN_HPP_NOEXCEPT
      : headerVersionOne( headerVersionOne_ )
      , validationVersion( validationVersion_ )
      , implementationData( implementationData_ )
      , pipelineIndexCount( pipelineIndexCount_ )
      , pipelineIndexStride( pipelineIndexStride_ )
      , pipelineIndexOffset( pipelineIndexOffset_ )
    {
    }

    VULKAN_HPP_CONSTEXPR_14
      PipelineCacheHeaderVersionSafetyCriticalOne( PipelineCacheHeaderVersionSafetyCriticalOne const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineCacheHeaderVersionSafetyCriticalOne( VkPipelineCacheHeaderVersionSafetyCriticalOne const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineCacheHeaderVersionSafetyCriticalOne( *reinterpret_cast<PipelineCacheHeaderVersionSafetyCriticalOne const *>( &rhs ) )
    {
    }

    PipelineCacheHeaderVersionSafetyCriticalOne & operator=( PipelineCacheHeaderVersionSafetyCriticalOne const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PipelineCacheHeaderVersionSafetyCriticalOne & operator=( VkPipelineCacheHeaderVersionSafetyCriticalOne const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersionSafetyCriticalOne const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PipelineCacheHeaderVersionSafetyCriticalOne &
      setHeaderVersionOne( VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersionOne const & headerVersionOne_ ) VULKAN_HPP_NOEXCEPT
    {
      headerVersionOne = headerVersionOne_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineCacheHeaderVersionSafetyCriticalOne &
      setValidationVersion( VULKAN_HPP_NAMESPACE::PipelineCacheValidationVersion validationVersion_ ) VULKAN_HPP_NOEXCEPT
    {
      validationVersion = validationVersion_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineCacheHeaderVersionSafetyCriticalOne & setImplementationData( uint32_t implementationData_ ) VULKAN_HPP_NOEXCEPT
    {
      implementationData = implementationData_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineCacheHeaderVersionSafetyCriticalOne & setPipelineIndexCount( uint32_t pipelineIndexCount_ ) VULKAN_HPP_NOEXCEPT
    {
      pipelineIndexCount = pipelineIndexCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineCacheHeaderVersionSafetyCriticalOne & setPipelineIndexStride( uint32_t pipelineIndexStride_ ) VULKAN_HPP_NOEXCEPT
    {
      pipelineIndexStride = pipelineIndexStride_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineCacheHeaderVersionSafetyCriticalOne & setPipelineIndexOffset( uint64_t pipelineIndexOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      pipelineIndexOffset = pipelineIndexOffset_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkPipelineCacheHeaderVersionSafetyCriticalOne const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineCacheHeaderVersionSafetyCriticalOne *>( this );
    }

    operator VkPipelineCacheHeaderVersionSafetyCriticalOne &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineCacheHeaderVersionSafetyCriticalOne *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#  if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#  else
    std::tuple<VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersionOne const &,
               VULKAN_HPP_NAMESPACE::PipelineCacheValidationVersion const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint64_t const &>
#  endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( headerVersionOne, validationVersion, implementationData, pipelineIndexCount, pipelineIndexStride, pipelineIndexOffset );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineCacheHeaderVersionSafetyCriticalOne const & ) const = default;
#else
    bool operator==( PipelineCacheHeaderVersionSafetyCriticalOne const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( headerVersionOne == rhs.headerVersionOne ) && ( validationVersion == rhs.validationVersion ) &&
             ( implementationData == rhs.implementationData ) && ( pipelineIndexCount == rhs.pipelineIndexCount ) &&
             ( pipelineIndexStride == rhs.pipelineIndexStride ) && ( pipelineIndexOffset == rhs.pipelineIndexOffset );
#  endif
    }

    bool operator!=( PipelineCacheHeaderVersionSafetyCriticalOne const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersionOne headerVersionOne = {};
    VULKAN_HPP_NAMESPACE::PipelineCacheValidationVersion validationVersion = VULKAN_HPP_NAMESPACE::PipelineCacheValidationVersion::eSafetyCriticalOne;
    uint32_t implementationData = {};
    uint32_t pipelineIndexCount = {};
    uint32_t pipelineIndexStride = {};
    uint64_t pipelineIndexOffset = {};
  };

  struct PipelineCacheSafetyCriticalIndexEntry
  {
    using NativeType = VkPipelineCacheSafetyCriticalIndexEntry;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 PipelineCacheSafetyCriticalIndexEntry( std::array<uint8_t, VK_UUID_SIZE> const & pipelineIdentifier_ = {},
                                                                   uint64_t pipelineMemorySize_ = {},
                                                                   uint64_t jsonSize_ = {},
                                                                   uint64_t jsonOffset_ = {},
                                                                   uint32_t stageIndexCount_ = {},
                                                                   uint32_t stageIndexStride_ = {},
                                                                   uint64_t stageIndexOffset_ = {} ) VULKAN_HPP_NOEXCEPT
      : pipelineIdentifier( pipelineIdentifier_ )
      , pipelineMemorySize( pipelineMemorySize_ )
      , jsonSize( jsonSize_ )
      , jsonOffset( jsonOffset_ )
      , stageIndexCount( stageIndexCount_ )
      , stageIndexStride( stageIndexStride_ )
      , stageIndexOffset( stageIndexOffset_ )
    {
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineCacheSafetyCriticalIndexEntry( PipelineCacheSafetyCriticalIndexEntry const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineCacheSafetyCriticalIndexEntry( VkPipelineCacheSafetyCriticalIndexEntry const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineCacheSafetyCriticalIndexEntry( *reinterpret_cast<PipelineCacheSafetyCriticalIndexEntry const *>( &rhs ) )
    {
    }

    PipelineCacheSafetyCriticalIndexEntry & operator=( PipelineCacheSafetyCriticalIndexEntry const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PipelineCacheSafetyCriticalIndexEntry & operator=( VkPipelineCacheSafetyCriticalIndexEntry const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCacheSafetyCriticalIndexEntry const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PipelineCacheSafetyCriticalIndexEntry &
      setPipelineIdentifier( std::array<uint8_t, VK_UUID_SIZE> pipelineIdentifier_ ) VULKAN_HPP_NOEXCEPT
    {
      pipelineIdentifier = pipelineIdentifier_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineCacheSafetyCriticalIndexEntry & setPipelineMemorySize( uint64_t pipelineMemorySize_ ) VULKAN_HPP_NOEXCEPT
    {
      pipelineMemorySize = pipelineMemorySize_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineCacheSafetyCriticalIndexEntry & setJsonSize( uint64_t jsonSize_ ) VULKAN_HPP_NOEXCEPT
    {
      jsonSize = jsonSize_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineCacheSafetyCriticalIndexEntry & setJsonOffset( uint64_t jsonOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      jsonOffset = jsonOffset_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineCacheSafetyCriticalIndexEntry & setStageIndexCount( uint32_t stageIndexCount_ ) VULKAN_HPP_NOEXCEPT
    {
      stageIndexCount = stageIndexCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineCacheSafetyCriticalIndexEntry & setStageIndexStride( uint32_t stageIndexStride_ ) VULKAN_HPP_NOEXCEPT
    {
      stageIndexStride = stageIndexStride_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineCacheSafetyCriticalIndexEntry & setStageIndexOffset( uint64_t stageIndexOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      stageIndexOffset = stageIndexOffset_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkPipelineCacheSafetyCriticalIndexEntry const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineCacheSafetyCriticalIndexEntry *>( this );
    }

    operator VkPipelineCacheSafetyCriticalIndexEntry &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineCacheSafetyCriticalIndexEntry *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#  if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#  else
    std::tuple<VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> const &,
               uint64_t const &,
               uint64_t const &,
               uint64_t const &,
               uint32_t const &,
               uint32_t const &,
               uint64_t const &>
#  endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( pipelineIdentifier, pipelineMemorySize, jsonSize, jsonOffset, stageIndexCount, stageIndexStride, stageIndexOffset );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineCacheSafetyCriticalIndexEntry const & ) const = default;
#else
    bool operator==( PipelineCacheSafetyCriticalIndexEntry const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( pipelineIdentifier == rhs.pipelineIdentifier ) && ( pipelineMemorySize == rhs.pipelineMemorySize ) && ( jsonSize == rhs.jsonSize ) &&
             ( jsonOffset == rhs.jsonOffset ) && ( stageIndexCount == rhs.stageIndexCount ) && ( stageIndexStride == rhs.stageIndexStride ) &&
             ( stageIndexOffset == rhs.stageIndexOffset );
#  endif
    }

    bool operator!=( PipelineCacheSafetyCriticalIndexEntry const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> pipelineIdentifier = {};
    uint64_t pipelineMemorySize = {};
    uint64_t jsonSize = {};
    uint64_t jsonOffset = {};
    uint32_t stageIndexCount = {};
    uint32_t stageIndexStride = {};
    uint64_t stageIndexOffset = {};
  };

  struct PipelineCacheStageValidationIndexEntry
  {
    using NativeType = VkPipelineCacheStageValidationIndexEntry;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PipelineCacheStageValidationIndexEntry( uint64_t codeSize_ = {}, uint64_t codeOffset_ = {} ) VULKAN_HPP_NOEXCEPT
      : codeSize( codeSize_ )
      , codeOffset( codeOffset_ )
    {
    }

    VULKAN_HPP_CONSTEXPR PipelineCacheStageValidationIndexEntry( PipelineCacheStageValidationIndexEntry const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineCacheStageValidationIndexEntry( VkPipelineCacheStageValidationIndexEntry const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineCacheStageValidationIndexEntry( *reinterpret_cast<PipelineCacheStageValidationIndexEntry const *>( &rhs ) )
    {
    }

    PipelineCacheStageValidationIndexEntry & operator=( PipelineCacheStageValidationIndexEntry const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PipelineCacheStageValidationIndexEntry & operator=( VkPipelineCacheStageValidationIndexEntry const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCacheStageValidationIndexEntry const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PipelineCacheStageValidationIndexEntry & setCodeSize( uint64_t codeSize_ ) VULKAN_HPP_NOEXCEPT
    {
      codeSize = codeSize_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineCacheStageValidationIndexEntry & setCodeOffset( uint64_t codeOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      codeOffset = codeOffset_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkPipelineCacheStageValidationIndexEntry const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineCacheStageValidationIndexEntry *>( this );
    }

    operator VkPipelineCacheStageValidationIndexEntry &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineCacheStageValidationIndexEntry *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#  if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#  else
    std::tuple<uint64_t const &, uint64_t const &>
#  endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( codeSize, codeOffset );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineCacheStageValidationIndexEntry const & ) const = default;
#else
    bool operator==( PipelineCacheStageValidationIndexEntry const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( codeSize == rhs.codeSize ) && ( codeOffset == rhs.codeOffset );
#  endif
    }

    bool operator!=( PipelineCacheStageValidationIndexEntry const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    uint64_t codeSize = {};
    uint64_t codeOffset = {};
  };

  struct PipelineColorBlendAdvancedStateCreateInfoEXT
  {
    using NativeType = VkPipelineColorBlendAdvancedStateCreateInfoEXT;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineColorBlendAdvancedStateCreateInfoEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      PipelineColorBlendAdvancedStateCreateInfoEXT( VULKAN_HPP_NAMESPACE::Bool32 srcPremultiplied_ = {},
                                                    VULKAN_HPP_NAMESPACE::Bool32 dstPremultiplied_ = {},
                                                    VULKAN_HPP_NAMESPACE::BlendOverlapEXT blendOverlap_ = VULKAN_HPP_NAMESPACE::BlendOverlapEXT::eUncorrelated,
                                                    const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , srcPremultiplied( srcPremultiplied_ )
      , dstPremultiplied( dstPremultiplied_ )
      , blendOverlap( blendOverlap_ )
    {
    }

    VULKAN_HPP_CONSTEXPR PipelineColorBlendAdvancedStateCreateInfoEXT( PipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineColorBlendAdvancedStateCreateInfoEXT( VkPipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineColorBlendAdvancedStateCreateInfoEXT( *reinterpret_cast<PipelineColorBlendAdvancedStateCreateInfoEXT const *>( &rhs ) )
    {
    }

    PipelineColorBlendAdvancedStateCreateInfoEXT & operator=( PipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PipelineColorBlendAdvancedStateCreateInfoEXT & operator=( VkPipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineColorBlendAdvancedStateCreateInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAdvancedStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAdvancedStateCreateInfoEXT &
      setSrcPremultiplied( VULKAN_HPP_NAMESPACE::Bool32 srcPremultiplied_ ) VULKAN_HPP_NOEXCEPT
    {
      srcPremultiplied = srcPremultiplied_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAdvancedStateCreateInfoEXT &
      setDstPremultiplied( VULKAN_HPP_NAMESPACE::Bool32 dstPremultiplied_ ) VULKAN_HPP_NOEXCEPT
    {
      dstPremultiplied = dstPremultiplied_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAdvancedStateCreateInfoEXT &
      setBlendOverlap( VULKAN_HPP_NAMESPACE::BlendOverlapEXT blendOverlap_ ) VULKAN_HPP_NOEXCEPT
    {
      blendOverlap = blendOverlap_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkPipelineColorBlendAdvancedStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineColorBlendAdvancedStateCreateInfoEXT *>( this );
    }

    operator VkPipelineColorBlendAdvancedStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineColorBlendAdvancedStateCreateInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#  if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#  else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
               const void * const &,
               VULKAN_HPP_NAMESPACE::Bool32 const &,
               VULKAN_HPP_NAMESPACE::Bool32 const &,
               VULKAN_HPP_NAMESPACE::BlendOverlapEXT const &>
#  endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, srcPremultiplied, dstPremultiplied, blendOverlap );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineColorBlendAdvancedStateCreateInfoEXT const & ) const = default;
#else
    bool operator==( PipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcPremultiplied == rhs.srcPremultiplied ) && ( dstPremultiplied == rhs.dstPremultiplied ) &&
             ( blendOverlap == rhs.blendOverlap );
#  endif
    }

    bool operator!=( PipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineColorBlendAdvancedStateCreateInfoEXT;
    const void * pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 srcPremultiplied = {};
    VULKAN_HPP_NAMESPACE::Bool32 dstPremultiplied = {};
    VULKAN_HPP_NAMESPACE::BlendOverlapEXT blendOverlap = VULKAN_HPP_NAMESPACE::BlendOverlapEXT::eUncorrelated;
  };

  template <>
  struct CppType<StructureType, StructureType::ePipelineColorBlendAdvancedStateCreateInfoEXT>
  {
    using Type = PipelineColorBlendAdvancedStateCreateInfoEXT;
  };

  struct PipelineColorWriteCreateInfoEXT
  {
    using NativeType = VkPipelineColorWriteCreateInfoEXT;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineColorWriteCreateInfoEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PipelineColorWriteCreateInfoEXT( uint32_t attachmentCount_ = {},
                                                          const VULKAN_HPP_NAMESPACE::Bool32 * pColorWriteEnables_ = {},
                                                          const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , attachmentCount( attachmentCount_ )
      , pColorWriteEnables( pColorWriteEnables_ )
    {
    }

    VULKAN_HPP_CONSTEXPR PipelineColorWriteCreateInfoEXT( PipelineColorWriteCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineColorWriteCreateInfoEXT( VkPipelineColorWriteCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineColorWriteCreateInfoEXT( *reinterpret_cast<PipelineColorWriteCreateInfoEXT const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    PipelineColorWriteCreateInfoEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Bool32> const & colorWriteEnables_,
                                     const void * pNext_ = nullptr )
      : pNext( pNext_ ), attachmentCount( static_cast<uint32_t>( colorWriteEnables_.size() ) ), pColorWriteEnables( colorWriteEnables_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    PipelineColorWriteCreateInfoEXT & operator=( PipelineColorWriteCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PipelineColorWriteCreateInfoEXT & operator=( VkPipelineColorWriteCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineColorWriteCreateInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PipelineColorWriteCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineColorWriteCreateInfoEXT & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT
    {
      attachmentCount = attachmentCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineColorWriteCreateInfoEXT &
      setPColorWriteEnables( const VULKAN_HPP_NAMESPACE::Bool32 * pColorWriteEnables_ ) VULKAN_HPP_NOEXCEPT
    {
      pColorWriteEnables = pColorWriteEnables_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    PipelineColorWriteCreateInfoEXT &
      setColorWriteEnables( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Bool32> const & colorWriteEnables_ ) VULKAN_HPP_NOEXCEPT
    {
      attachmentCount = static_cast<uint32_t>( colorWriteEnables_.size() );
      pColorWriteEnables = colorWriteEnables_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkPipelineColorWriteCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineColorWriteCreateInfoEXT *>( this );
    }

    operator VkPipelineColorWriteCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineColorWriteCreateInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#  if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#  else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::Bool32 * const &>
#  endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, attachmentCount, pColorWriteEnables );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineColorWriteCreateInfoEXT const & ) const = default;
#else
    bool operator==( PipelineColorWriteCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( attachmentCount == rhs.attachmentCount ) && ( pColorWriteEnables == rhs.pColorWriteEnables );
#  endif
    }

    bool operator!=( PipelineColorWriteCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineColorWriteCreateInfoEXT;
    const void * pNext = {};
    uint32_t attachmentCount = {};
    const VULKAN_HPP_NAMESPACE::Bool32 * pColorWriteEnables = {};
  };

  template <>
  struct CppType<StructureType, StructureType::ePipelineColorWriteCreateInfoEXT>
  {
    using Type = PipelineColorWriteCreateInfoEXT;
  };

  struct PipelineCreationFeedback
  {
    using NativeType = VkPipelineCreationFeedback;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PipelineCreationFeedback( VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackFlags flags_ = {},
                                                   uint64_t duration_ = {} ) VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
      , duration( duration_ )
    {
    }

    VULKAN_HPP_CONSTEXPR PipelineCreationFeedback( PipelineCreationFeedback const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineCreationFeedback( VkPipelineCreationFeedback const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineCreationFeedback( *reinterpret_cast<PipelineCreationFeedback const *>( &rhs ) )
    {
    }

    PipelineCreationFeedback & operator=( PipelineCreationFeedback const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PipelineCreationFeedback & operator=( VkPipelineCreationFeedback const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCreationFeedback const *>( &rhs );
      return *this;
    }

    operator VkPipelineCreationFeedback const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineCreationFeedback *>( this );
    }

    operator VkPipelineCreationFeedback &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineCreationFeedback *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#  if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#  else
    std::tuple<VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackFlags const &, uint64_t const &>
#  endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( flags, duration );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineCreationFeedback const & ) const = default;
#else
    bool operator==( PipelineCreationFeedback const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( flags == rhs.flags ) && ( duration == rhs.duration );
#  endif
    }

    bool operator!=( PipelineCreationFeedback const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackFlags flags = {};
    uint64_t duration = {};
  };

  using PipelineCreationFeedbackEXT = PipelineCreationFeedback;

  struct PipelineCreationFeedbackCreateInfo
  {
    using NativeType = VkPipelineCreationFeedbackCreateInfo;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCreationFeedbackCreateInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackCreateInfo( VULKAN_HPP_NAMESPACE::PipelineCreationFeedback * pPipelineCreationFeedback_ = {},
                                                             uint32_t pipelineStageCreationFeedbackCount_ = {},
                                                             VULKAN_HPP_NAMESPACE::PipelineCreationFeedback * pPipelineStageCreationFeedbacks_ = {},
                                                             const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , pPipelineCreationFeedback( pPipelineCreationFeedback_ )
      , pipelineStageCreationFeedbackCount( pipelineStageCreationFeedbackCount_ )
      , pPipelineStageCreationFeedbacks( pPipelineStageCreationFeedbacks_ )
    {
    }

    VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackCreateInfo( PipelineCreationFeedbackCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineCreationFeedbackCreateInfo( VkPipelineCreationFeedbackCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineCreationFeedbackCreateInfo( *reinterpret_cast<PipelineCreationFeedbackCreateInfo const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    PipelineCreationFeedbackCreateInfo(
      VULKAN_HPP_NAMESPACE::PipelineCreationFeedback * pPipelineCreationFeedback_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<VULKAN_HPP_NAMESPACE::PipelineCreationFeedback> const & pipelineStageCreationFeedbacks_,
      const void * pNext_ = nullptr )
      : pNext( pNext_ )
      , pPipelineCreationFeedback( pPipelineCreationFeedback_ )
      , pipelineStageCreationFeedbackCount( static_cast<uint32_t>( pipelineStageCreationFeedbacks_.size() ) )
      , pPipelineStageCreationFeedbacks( pipelineStageCreationFeedbacks_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    PipelineCreationFeedbackCreateInfo & operator=( PipelineCreationFeedbackCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PipelineCreationFeedbackCreateInfo & operator=( VkPipelineCreationFeedbackCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackCreateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PipelineCreationFeedbackCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineCreationFeedbackCreateInfo &
      setPPipelineCreationFeedback( VULKAN_HPP_NAMESPACE::PipelineCreationFeedback * pPipelineCreationFeedback_ ) VULKAN_HPP_NOEXCEPT
    {
      pPipelineCreationFeedback = pPipelineCreationFeedback_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineCreationFeedbackCreateInfo &
      setPipelineStageCreationFeedbackCount( uint32_t pipelineStageCreationFeedbackCount_ ) VULKAN_HPP_NOEXCEPT
    {
      pipelineStageCreationFeedbackCount = pipelineStageCreationFeedbackCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineCreationFeedbackCreateInfo &
      setPPipelineStageCreationFeedbacks( VULKAN_HPP_NAMESPACE::PipelineCreationFeedback * pPipelineStageCreationFeedbacks_ ) VULKAN_HPP_NOEXCEPT
    {
      pPipelineStageCreationFeedbacks = pPipelineStageCreationFeedbacks_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    PipelineCreationFeedbackCreateInfo & setPipelineStageCreationFeedbacks(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<VULKAN_HPP_NAMESPACE::PipelineCreationFeedback> const & pipelineStageCreationFeedbacks_ )
      VULKAN_HPP_NOEXCEPT
    {
      pipelineStageCreationFeedbackCount = static_cast<uint32_t>( pipelineStageCreationFeedbacks_.size() );
      pPipelineStageCreationFeedbacks = pipelineStageCreationFeedbacks_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkPipelineCreationFeedbackCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineCreationFeedbackCreateInfo *>( this );
    }

    operator VkPipelineCreationFeedbackCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineCreationFeedbackCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#  if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#  else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
               const void * const &,
               VULKAN_HPP_NAMESPACE::PipelineCreationFeedback * const &,
               uint32_t const &,
               VULKAN_HPP_NAMESPACE::PipelineCreationFeedback * const &>
#  endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, pPipelineCreationFeedback, pipelineStageCreationFeedbackCount, pPipelineStageCreationFeedbacks );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineCreationFeedbackCreateInfo const & ) const = default;
#else
    bool operator==( PipelineCreationFeedbackCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pPipelineCreationFeedback == rhs.pPipelineCreationFeedback ) &&
             ( pipelineStageCreationFeedbackCount == rhs.pipelineStageCreationFeedbackCount ) &&
             ( pPipelineStageCreationFeedbacks == rhs.pPipelineStageCreationFeedbacks );
#  endif
    }

    bool operator!=( PipelineCreationFeedbackCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCreationFeedbackCreateInfo;
    const void * pNext = {};
    VULKAN_HPP_NAMESPACE::PipelineCreationFeedback * pPipelineCreationFeedback = {};
    uint32_t pipelineStageCreationFeedbackCount = {};
    VULKAN_HPP_NAMESPACE::PipelineCreationFeedback * pPipelineStageCreationFeedbacks = {};
  };

  template <>
  struct CppType<StructureType, StructureType::ePipelineCreationFeedbackCreateInfo>
  {
    using Type = PipelineCreationFeedbackCreateInfo;
  };

  using PipelineCreationFeedbackCreateInfoEXT = PipelineCreationFeedbackCreateInfo;

struct PipelineDiscardRectangleStateCreateInfoEXT
|
|
{
|
|
using NativeType = VkPipelineDiscardRectangleStateCreateInfoEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineDiscardRectangleStateCreateInfoEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PipelineDiscardRectangleStateCreateInfoEXT(
|
|
VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT flags_ = {},
|
|
                                                                 VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode_ = VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT::eInclusive,
                                                                 uint32_t                                      discardRectangleCount_ = {},
                                                                 const VULKAN_HPP_NAMESPACE::Rect2D *          pDiscardRectangles_    = {},
                                                                 const void *                                  pNext_                 = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , flags( flags_ )
      , discardRectangleMode( discardRectangleMode_ )
      , discardRectangleCount( discardRectangleCount_ )
      , pDiscardRectangles( pDiscardRectangles_ )
    {
    }

    VULKAN_HPP_CONSTEXPR PipelineDiscardRectangleStateCreateInfoEXT( PipelineDiscardRectangleStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineDiscardRectangleStateCreateInfoEXT( VkPipelineDiscardRectangleStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineDiscardRectangleStateCreateInfoEXT( *reinterpret_cast<PipelineDiscardRectangleStateCreateInfoEXT const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    PipelineDiscardRectangleStateCreateInfoEXT( VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT flags_,
                                                VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT                     discardRectangleMode_,
                                                VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & discardRectangles_,
                                                const void * pNext_ = nullptr )
      : pNext( pNext_ )
      , flags( flags_ )
      , discardRectangleMode( discardRectangleMode_ )
      , discardRectangleCount( static_cast<uint32_t>( discardRectangles_.size() ) )
      , pDiscardRectangles( discardRectangles_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    PipelineDiscardRectangleStateCreateInfoEXT & operator=( PipelineDiscardRectangleStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PipelineDiscardRectangleStateCreateInfoEXT & operator=( VkPipelineDiscardRectangleStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PipelineDiscardRectangleStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineDiscardRectangleStateCreateInfoEXT &
      setFlags( VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineDiscardRectangleStateCreateInfoEXT &
      setDiscardRectangleMode( VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode_ ) VULKAN_HPP_NOEXCEPT
    {
      discardRectangleMode = discardRectangleMode_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineDiscardRectangleStateCreateInfoEXT & setDiscardRectangleCount( uint32_t discardRectangleCount_ ) VULKAN_HPP_NOEXCEPT
    {
      discardRectangleCount = discardRectangleCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineDiscardRectangleStateCreateInfoEXT &
      setPDiscardRectangles( const VULKAN_HPP_NAMESPACE::Rect2D * pDiscardRectangles_ ) VULKAN_HPP_NOEXCEPT
    {
      pDiscardRectangles = pDiscardRectangles_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    PipelineDiscardRectangleStateCreateInfoEXT &
      setDiscardRectangles( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & discardRectangles_ ) VULKAN_HPP_NOEXCEPT
    {
      discardRectangleCount = static_cast<uint32_t>( discardRectangles_.size() );
      pDiscardRectangles    = discardRectangles_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkPipelineDiscardRectangleStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineDiscardRectangleStateCreateInfoEXT *>( this );
    }

    operator VkPipelineDiscardRectangleStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineDiscardRectangleStateCreateInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#  if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#  else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
               const void * const &,
               VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT const &,
               VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT const &,
               uint32_t const &,
               const VULKAN_HPP_NAMESPACE::Rect2D * const &>
#  endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, discardRectangleMode, discardRectangleCount, pDiscardRectangles );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineDiscardRectangleStateCreateInfoEXT const & ) const = default;
#else
    bool operator==( PipelineDiscardRectangleStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( discardRectangleMode == rhs.discardRectangleMode ) &&
             ( discardRectangleCount == rhs.discardRectangleCount ) && ( pDiscardRectangles == rhs.pDiscardRectangles );
#  endif
    }

    bool operator!=( PipelineDiscardRectangleStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                               sType                 = StructureType::ePipelineDiscardRectangleStateCreateInfoEXT;
    const void *                                                      pNext                 = {};
    VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT flags                 = {};
    VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT                     discardRectangleMode  = VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT::eInclusive;
    uint32_t                                                          discardRectangleCount = {};
    const VULKAN_HPP_NAMESPACE::Rect2D *                              pDiscardRectangles    = {};
  };

  template <>
  struct CppType<StructureType, StructureType::ePipelineDiscardRectangleStateCreateInfoEXT>
  {
    using Type = PipelineDiscardRectangleStateCreateInfoEXT;
  };

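  // Illustrative usage sketch (editorial comment, not emitted by the registry generator):
  // chaining PipelineDiscardRectangleStateCreateInfoEXT into a graphics pipeline via pNext,
  // using the enhanced-mode ArrayProxy constructor shown above. The variables 'discardRects'
  // and 'pipelineCreateInfo' are assumed to exist in application code.
  //
  //   vk::Rect2D discardRects[] = { vk::Rect2D( { 0, 0 }, { 256, 256 } ) };
  //   vk::PipelineDiscardRectangleStateCreateInfoEXT discardState( {}, vk::DiscardRectangleModeEXT::eInclusive, discardRects );
  //   vk::GraphicsPipelineCreateInfo pipelineCreateInfo;  // assumed to be filled elsewhere
  //   pipelineCreateInfo.pNext = &discardState;
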
  struct PipelineFragmentShadingRateStateCreateInfoKHR
  {
    using NativeType = VkPipelineFragmentShadingRateStateCreateInfoKHR;

    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineFragmentShadingRateStateCreateInfoKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateStateCreateInfoKHR(
      VULKAN_HPP_NAMESPACE::Extent2D fragmentSize_ = {},
      std::array<VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR, 2> const &
        combinerOps_ = { { VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR::eKeep, VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR::eKeep } },
      const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , fragmentSize( fragmentSize_ )
      , combinerOps( combinerOps_ )
    {
    }

    VULKAN_HPP_CONSTEXPR_14
      PipelineFragmentShadingRateStateCreateInfoKHR( PipelineFragmentShadingRateStateCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineFragmentShadingRateStateCreateInfoKHR( VkPipelineFragmentShadingRateStateCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineFragmentShadingRateStateCreateInfoKHR( *reinterpret_cast<PipelineFragmentShadingRateStateCreateInfoKHR const *>( &rhs ) )
    {
    }

    PipelineFragmentShadingRateStateCreateInfoKHR & operator=( PipelineFragmentShadingRateStateCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PipelineFragmentShadingRateStateCreateInfoKHR & operator=( VkPipelineFragmentShadingRateStateCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineFragmentShadingRateStateCreateInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateStateCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateStateCreateInfoKHR &
      setFragmentSize( VULKAN_HPP_NAMESPACE::Extent2D const & fragmentSize_ ) VULKAN_HPP_NOEXCEPT
    {
      fragmentSize = fragmentSize_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateStateCreateInfoKHR &
      setCombinerOps( std::array<VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR, 2> combinerOps_ ) VULKAN_HPP_NOEXCEPT
    {
      combinerOps = combinerOps_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkPipelineFragmentShadingRateStateCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineFragmentShadingRateStateCreateInfoKHR *>( this );
    }

    operator VkPipelineFragmentShadingRateStateCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineFragmentShadingRateStateCreateInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#  if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#  else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
               const void * const &,
               VULKAN_HPP_NAMESPACE::Extent2D const &,
               VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR, 2> const &>
#  endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, fragmentSize, combinerOps );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineFragmentShadingRateStateCreateInfoKHR const & ) const = default;
#else
    bool operator==( PipelineFragmentShadingRateStateCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fragmentSize == rhs.fragmentSize ) && ( combinerOps == rhs.combinerOps );
#  endif
    }

    bool operator!=( PipelineFragmentShadingRateStateCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                                                            sType        = StructureType::ePipelineFragmentShadingRateStateCreateInfoKHR;
    const void *                                                                                   pNext        = {};
    VULKAN_HPP_NAMESPACE::Extent2D                                                                 fragmentSize = {};
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR, 2> combinerOps = {};
  };

  template <>
  struct CppType<StructureType, StructureType::ePipelineFragmentShadingRateStateCreateInfoKHR>
  {
    using Type = PipelineFragmentShadingRateStateCreateInfoKHR;
  };

  struct PushConstantRange
  {
    using NativeType = VkPushConstantRange;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      PushConstantRange( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = {}, uint32_t offset_ = {}, uint32_t size_ = {} ) VULKAN_HPP_NOEXCEPT
      : stageFlags( stageFlags_ )
      , offset( offset_ )
      , size( size_ )
    {
    }

    VULKAN_HPP_CONSTEXPR PushConstantRange( PushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PushConstantRange( VkPushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT : PushConstantRange( *reinterpret_cast<PushConstantRange const *>( &rhs ) ) {}

    PushConstantRange & operator=( PushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PushConstantRange & operator=( VkPushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PushConstantRange const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PushConstantRange & setStageFlags( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ ) VULKAN_HPP_NOEXCEPT
    {
      stageFlags = stageFlags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PushConstantRange & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT
    {
      offset = offset_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PushConstantRange & setSize( uint32_t size_ ) VULKAN_HPP_NOEXCEPT
    {
      size = size_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkPushConstantRange const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPushConstantRange *>( this );
    }

    operator VkPushConstantRange &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPushConstantRange *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#  if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#  else
    std::tuple<VULKAN_HPP_NAMESPACE::ShaderStageFlags const &, uint32_t const &, uint32_t const &>
#  endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( stageFlags, offset, size );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PushConstantRange const & ) const = default;
#else
    bool operator==( PushConstantRange const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( stageFlags == rhs.stageFlags ) && ( offset == rhs.offset ) && ( size == rhs.size );
#  endif
    }

    bool operator!=( PushConstantRange const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags = {};
    uint32_t                               offset     = {};
    uint32_t                               size       = {};
  };

  struct PipelineLayoutCreateInfo
  {
    using NativeType = VkPipelineLayoutCreateInfo;

    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineLayoutCreateInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PipelineLayoutCreateInfo( VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags flags_                  = {},
                                                   uint32_t                                        setLayoutCount_         = {},
                                                   const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts_          = {},
                                                   uint32_t                                        pushConstantRangeCount_ = {},
                                                   const VULKAN_HPP_NAMESPACE::PushConstantRange * pPushConstantRanges_    = {},
                                                   const void *                                    pNext_                  = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , flags( flags_ )
      , setLayoutCount( setLayoutCount_ )
      , pSetLayouts( pSetLayouts_ )
      , pushConstantRangeCount( pushConstantRangeCount_ )
      , pPushConstantRanges( pPushConstantRanges_ )
    {
    }

    VULKAN_HPP_CONSTEXPR PipelineLayoutCreateInfo( PipelineLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineLayoutCreateInfo( VkPipelineLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineLayoutCreateInfo( *reinterpret_cast<PipelineLayoutCreateInfo const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    PipelineLayoutCreateInfo( VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags flags_,
                              VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorSetLayout> const & setLayouts_,
                              VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PushConstantRange> const & pushConstantRanges_ = {},
                              const void * pNext_ = nullptr )
      : pNext( pNext_ )
      , flags( flags_ )
      , setLayoutCount( static_cast<uint32_t>( setLayouts_.size() ) )
      , pSetLayouts( setLayouts_.data() )
      , pushConstantRangeCount( static_cast<uint32_t>( pushConstantRanges_.size() ) )
      , pPushConstantRanges( pushConstantRanges_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    PipelineLayoutCreateInfo & operator=( PipelineLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PipelineLayoutCreateInfo & operator=( VkPipelineLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PipelineLayoutCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineLayoutCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineLayoutCreateInfo & setSetLayoutCount( uint32_t setLayoutCount_ ) VULKAN_HPP_NOEXCEPT
    {
      setLayoutCount = setLayoutCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineLayoutCreateInfo & setPSetLayouts( const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts_ ) VULKAN_HPP_NOEXCEPT
    {
      pSetLayouts = pSetLayouts_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    PipelineLayoutCreateInfo &
      setSetLayouts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorSetLayout> const & setLayouts_ ) VULKAN_HPP_NOEXCEPT
    {
      setLayoutCount = static_cast<uint32_t>( setLayouts_.size() );
      pSetLayouts    = setLayouts_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 PipelineLayoutCreateInfo & setPushConstantRangeCount( uint32_t pushConstantRangeCount_ ) VULKAN_HPP_NOEXCEPT
    {
      pushConstantRangeCount = pushConstantRangeCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineLayoutCreateInfo &
      setPPushConstantRanges( const VULKAN_HPP_NAMESPACE::PushConstantRange * pPushConstantRanges_ ) VULKAN_HPP_NOEXCEPT
    {
      pPushConstantRanges = pPushConstantRanges_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    PipelineLayoutCreateInfo & setPushConstantRanges(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PushConstantRange> const & pushConstantRanges_ ) VULKAN_HPP_NOEXCEPT
    {
      pushConstantRangeCount = static_cast<uint32_t>( pushConstantRanges_.size() );
      pPushConstantRanges    = pushConstantRanges_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkPipelineLayoutCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineLayoutCreateInfo *>( this );
    }

    operator VkPipelineLayoutCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineLayoutCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#  if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#  else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
               const void * const &,
               VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags const &,
               uint32_t const &,
               const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * const &,
               uint32_t const &,
               const VULKAN_HPP_NAMESPACE::PushConstantRange * const &>
#  endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, setLayoutCount, pSetLayouts, pushConstantRangeCount, pPushConstantRanges );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineLayoutCreateInfo const & ) const = default;
#else
    bool operator==( PipelineLayoutCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( setLayoutCount == rhs.setLayoutCount ) &&
             ( pSetLayouts == rhs.pSetLayouts ) && ( pushConstantRangeCount == rhs.pushConstantRangeCount ) &&
             ( pPushConstantRanges == rhs.pPushConstantRanges );
#  endif
    }

    bool operator!=( PipelineLayoutCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType               sType                  = StructureType::ePipelineLayoutCreateInfo;
    const void *                                      pNext                  = {};
    VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags   flags                  = {};
    uint32_t                                          setLayoutCount         = {};
    const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts            = {};
    uint32_t                                          pushConstantRangeCount = {};
    const VULKAN_HPP_NAMESPACE::PushConstantRange *   pPushConstantRanges    = {};
  };

  template <>
  struct CppType<StructureType, StructureType::ePipelineLayoutCreateInfo>
  {
    using Type = PipelineLayoutCreateInfo;
  };

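  // Illustrative usage sketch (editorial comment, not emitted by the registry generator):
  // building a pipeline layout from one descriptor set layout and one push constant range via
  // the enhanced-mode ArrayProxy constructor above. 'device' and 'descriptorSetLayout' are
  // assumed to exist in application code.
  //
  //   vk::PushConstantRange        pushRange( vk::ShaderStageFlagBits::eVertex, 0, 64 );
  //   vk::PipelineLayoutCreateInfo layoutCreateInfo( {}, descriptorSetLayout, pushRange );
  //   vk::PipelineLayout           pipelineLayout = device.createPipelineLayout( layoutCreateInfo );
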
  struct PipelineOfflineCreateInfo
  {
    using NativeType = VkPipelineOfflineCreateInfo;

    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineOfflineCreateInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 PipelineOfflineCreateInfo(
      std::array<uint8_t, VK_UUID_SIZE> const & pipelineIdentifier_ = {},
      VULKAN_HPP_NAMESPACE::PipelineMatchControl matchControl_      = VULKAN_HPP_NAMESPACE::PipelineMatchControl::eApplicationUuidExactMatch,
      VULKAN_HPP_NAMESPACE::DeviceSize            poolEntrySize_    = {},
      const void *                                pNext_            = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , pipelineIdentifier( pipelineIdentifier_ )
      , matchControl( matchControl_ )
      , poolEntrySize( poolEntrySize_ )
    {
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineOfflineCreateInfo( PipelineOfflineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineOfflineCreateInfo( VkPipelineOfflineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineOfflineCreateInfo( *reinterpret_cast<PipelineOfflineCreateInfo const *>( &rhs ) )
    {
    }

    PipelineOfflineCreateInfo & operator=( PipelineOfflineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PipelineOfflineCreateInfo & operator=( VkPipelineOfflineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineOfflineCreateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PipelineOfflineCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineOfflineCreateInfo & setPipelineIdentifier( std::array<uint8_t, VK_UUID_SIZE> pipelineIdentifier_ ) VULKAN_HPP_NOEXCEPT
    {
      pipelineIdentifier = pipelineIdentifier_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineOfflineCreateInfo & setMatchControl( VULKAN_HPP_NAMESPACE::PipelineMatchControl matchControl_ ) VULKAN_HPP_NOEXCEPT
    {
      matchControl = matchControl_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineOfflineCreateInfo & setPoolEntrySize( VULKAN_HPP_NAMESPACE::DeviceSize poolEntrySize_ ) VULKAN_HPP_NOEXCEPT
    {
      poolEntrySize = poolEntrySize_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkPipelineOfflineCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineOfflineCreateInfo *>( this );
    }

    operator VkPipelineOfflineCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineOfflineCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#  if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#  else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
               const void * const &,
               VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> const &,
               VULKAN_HPP_NAMESPACE::PipelineMatchControl const &,
               VULKAN_HPP_NAMESPACE::DeviceSize const &>
#  endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, pipelineIdentifier, matchControl, poolEntrySize );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineOfflineCreateInfo const & ) const = default;
#else
    bool operator==( PipelineOfflineCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineIdentifier == rhs.pipelineIdentifier ) && ( matchControl == rhs.matchControl ) &&
             ( poolEntrySize == rhs.poolEntrySize );
#  endif
    }

    bool operator!=( PipelineOfflineCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                          sType              = StructureType::ePipelineOfflineCreateInfo;
    const void *                                                 pNext              = {};
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE>  pipelineIdentifier = {};
    VULKAN_HPP_NAMESPACE::PipelineMatchControl                   matchControl       = VULKAN_HPP_NAMESPACE::PipelineMatchControl::eApplicationUuidExactMatch;
    VULKAN_HPP_NAMESPACE::DeviceSize                             poolEntrySize      = {};
  };

  template <>
  struct CppType<StructureType, StructureType::ePipelineOfflineCreateInfo>
  {
    using Type = PipelineOfflineCreateInfo;
  };

  struct PipelineRasterizationConservativeStateCreateInfoEXT
  {
    using NativeType = VkPipelineRasterizationConservativeStateCreateInfoEXT;

    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRasterizationConservativeStateCreateInfoEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PipelineRasterizationConservativeStateCreateInfoEXT(
      VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateFlagsEXT flags_ = {},
      VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode_ = VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT::eDisabled,
      float        extraPrimitiveOverestimationSize_ = {},
      const void * pNext_                            = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , flags( flags_ )
      , conservativeRasterizationMode( conservativeRasterizationMode_ )
      , extraPrimitiveOverestimationSize( extraPrimitiveOverestimationSize_ )
    {
    }

    VULKAN_HPP_CONSTEXPR
      PipelineRasterizationConservativeStateCreateInfoEXT( PipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineRasterizationConservativeStateCreateInfoEXT( VkPipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineRasterizationConservativeStateCreateInfoEXT( *reinterpret_cast<PipelineRasterizationConservativeStateCreateInfoEXT const *>( &rhs ) )
    {
    }

    PipelineRasterizationConservativeStateCreateInfoEXT &
      operator=( PipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PipelineRasterizationConservativeStateCreateInfoEXT & operator=( VkPipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationConservativeStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationConservativeStateCreateInfoEXT &
      setFlags( VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationConservativeStateCreateInfoEXT &
      setConservativeRasterizationMode( VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode_ ) VULKAN_HPP_NOEXCEPT
    {
      conservativeRasterizationMode = conservativeRasterizationMode_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationConservativeStateCreateInfoEXT &
      setExtraPrimitiveOverestimationSize( float extraPrimitiveOverestimationSize_ ) VULKAN_HPP_NOEXCEPT
    {
      extraPrimitiveOverestimationSize = extraPrimitiveOverestimationSize_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkPipelineRasterizationConservativeStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineRasterizationConservativeStateCreateInfoEXT *>( this );
    }

    operator VkPipelineRasterizationConservativeStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineRasterizationConservativeStateCreateInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#  if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#  else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
               const void * const &,
               VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateFlagsEXT const &,
               VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT const &,
               float const &>
#  endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, conservativeRasterizationMode, extraPrimitiveOverestimationSize );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineRasterizationConservativeStateCreateInfoEXT const & ) const = default;
#else
    bool operator==( PipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
             ( conservativeRasterizationMode == rhs.conservativeRasterizationMode ) &&
             ( extraPrimitiveOverestimationSize == rhs.extraPrimitiveOverestimationSize );
#  endif
    }

    bool operator!=( PipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                                         sType = StructureType::ePipelineRasterizationConservativeStateCreateInfoEXT;
    const void *                                                                pNext = {};
    VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateFlagsEXT  flags = {};
    VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode = VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT::eDisabled;
    float                                                   extraPrimitiveOverestimationSize = {};
  };

  template <>
  struct CppType<StructureType, StructureType::ePipelineRasterizationConservativeStateCreateInfoEXT>
  {
    using Type = PipelineRasterizationConservativeStateCreateInfoEXT;
  };

  struct PipelineRasterizationDepthClipStateCreateInfoEXT
  {
    using NativeType = VkPipelineRasterizationDepthClipStateCreateInfoEXT;

    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRasterizationDepthClipStateCreateInfoEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PipelineRasterizationDepthClipStateCreateInfoEXT( VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateFlagsEXT flags_ = {},
                                                                           VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ = {},
                                                                           const void *                 pNext_           = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , flags( flags_ )
      , depthClipEnable( depthClipEnable_ )
    {
    }

    VULKAN_HPP_CONSTEXPR
      PipelineRasterizationDepthClipStateCreateInfoEXT( PipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineRasterizationDepthClipStateCreateInfoEXT( VkPipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineRasterizationDepthClipStateCreateInfoEXT( *reinterpret_cast<PipelineRasterizationDepthClipStateCreateInfoEXT const *>( &rhs ) )
    {
    }

    PipelineRasterizationDepthClipStateCreateInfoEXT & operator=( PipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PipelineRasterizationDepthClipStateCreateInfoEXT & operator=( VkPipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationDepthClipStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationDepthClipStateCreateInfoEXT &
      setFlags( VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationDepthClipStateCreateInfoEXT &
      setDepthClipEnable( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      depthClipEnable = depthClipEnable_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkPipelineRasterizationDepthClipStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineRasterizationDepthClipStateCreateInfoEXT *>( this );
    }

    operator VkPipelineRasterizationDepthClipStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineRasterizationDepthClipStateCreateInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#  if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#  else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
               const void * const &,
               VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateFlagsEXT const &,
               VULKAN_HPP_NAMESPACE::Bool32 const &>
#  endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, depthClipEnable );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineRasterizationDepthClipStateCreateInfoEXT const & ) const = default;
#else
    bool operator==( PipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( depthClipEnable == rhs.depthClipEnable );
#  endif
    }

    bool operator!=( PipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                                      sType           = StructureType::ePipelineRasterizationDepthClipStateCreateInfoEXT;
    const void *                                                             pNext           = {};
    VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateFlagsEXT  flags           = {};
    VULKAN_HPP_NAMESPACE::Bool32                                             depthClipEnable = {};
  };

  template <>
  struct CppType<StructureType, StructureType::ePipelineRasterizationDepthClipStateCreateInfoEXT>
  {
    using Type = PipelineRasterizationDepthClipStateCreateInfoEXT;
  };

  struct PipelineRenderingCreateInfo
  {
    using NativeType = VkPipelineRenderingCreateInfo;

    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRenderingCreateInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PipelineRenderingCreateInfo( uint32_t                             viewMask_                = {},
                                                      uint32_t                             colorAttachmentCount_    = {},
                                                      const VULKAN_HPP_NAMESPACE::Format * pColorAttachmentFormats_ = {},
                                                      VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat_   = VULKAN_HPP_NAMESPACE::Format::eUndefined,
                                                      VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
                                                      const void *                 pNext_                   = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , viewMask( viewMask_ )
      , colorAttachmentCount( colorAttachmentCount_ )
      , pColorAttachmentFormats( pColorAttachmentFormats_ )
      , depthAttachmentFormat( depthAttachmentFormat_ )
      , stencilAttachmentFormat( stencilAttachmentFormat_ )
    {
    }

    VULKAN_HPP_CONSTEXPR PipelineRenderingCreateInfo( PipelineRenderingCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineRenderingCreateInfo( VkPipelineRenderingCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineRenderingCreateInfo( *reinterpret_cast<PipelineRenderingCreateInfo const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    PipelineRenderingCreateInfo( uint32_t                                                                                   viewMask_,
                                 VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Format> const & colorAttachmentFormats_,
                                 VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat_   = VULKAN_HPP_NAMESPACE::Format::eUndefined,
                                 VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
                                 const void *                 pNext_                   = nullptr )
      : pNext( pNext_ )
      , viewMask( viewMask_ )
      , colorAttachmentCount( static_cast<uint32_t>( colorAttachmentFormats_.size() ) )
      , pColorAttachmentFormats( colorAttachmentFormats_.data() )
      , depthAttachmentFormat( depthAttachmentFormat_ )
      , stencilAttachmentFormat( stencilAttachmentFormat_ )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    PipelineRenderingCreateInfo & operator=( PipelineRenderingCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PipelineRenderingCreateInfo & operator=( VkPipelineRenderingCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRenderingCreateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PipelineRenderingCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineRenderingCreateInfo & setViewMask( uint32_t viewMask_ ) VULKAN_HPP_NOEXCEPT
    {
      viewMask = viewMask_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineRenderingCreateInfo & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
    {
      colorAttachmentCount = colorAttachmentCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineRenderingCreateInfo &
      setPColorAttachmentFormats( const VULKAN_HPP_NAMESPACE::Format * pColorAttachmentFormats_ ) VULKAN_HPP_NOEXCEPT
    {
      pColorAttachmentFormats = pColorAttachmentFormats_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    PipelineRenderingCreateInfo & setColorAttachmentFormats(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Format> const & colorAttachmentFormats_ ) VULKAN_HPP_NOEXCEPT
    {
      colorAttachmentCount    = static_cast<uint32_t>( colorAttachmentFormats_.size() );
      pColorAttachmentFormats = colorAttachmentFormats_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 PipelineRenderingCreateInfo & setDepthAttachmentFormat( VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat_ ) VULKAN_HPP_NOEXCEPT
    {
      depthAttachmentFormat = depthAttachmentFormat_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineRenderingCreateInfo &
      setStencilAttachmentFormat( VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ ) VULKAN_HPP_NOEXCEPT
    {
      stencilAttachmentFormat = stencilAttachmentFormat_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkPipelineRenderingCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineRenderingCreateInfo *>( this );
    }

    operator VkPipelineRenderingCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineRenderingCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#  if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#  else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
               const void * const &,
               uint32_t const &,
               uint32_t const &,
               const VULKAN_HPP_NAMESPACE::Format * const &,
               VULKAN_HPP_NAMESPACE::Format const &,
               VULKAN_HPP_NAMESPACE::Format const &>
#  endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, viewMask, colorAttachmentCount, pColorAttachmentFormats, depthAttachmentFormat, stencilAttachmentFormat );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineRenderingCreateInfo const & ) const = default;
#else
    bool operator==( PipelineRenderingCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( viewMask == rhs.viewMask ) && ( colorAttachmentCount == rhs.colorAttachmentCount ) &&
             ( pColorAttachmentFormats == rhs.pColorAttachmentFormats ) && ( depthAttachmentFormat == rhs.depthAttachmentFormat ) &&
             ( stencilAttachmentFormat == rhs.stencilAttachmentFormat );
#  endif
    }

    bool operator!=( PipelineRenderingCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType  sType                   = StructureType::ePipelineRenderingCreateInfo;
    const void *                         pNext                   = {};
    uint32_t                             viewMask                = {};
    uint32_t                             colorAttachmentCount    = {};
    const VULKAN_HPP_NAMESPACE::Format * pColorAttachmentFormats = {};
    VULKAN_HPP_NAMESPACE::Format         depthAttachmentFormat   = VULKAN_HPP_NAMESPACE::Format::eUndefined;
    VULKAN_HPP_NAMESPACE::Format         stencilAttachmentFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
  };

  template <>
  struct CppType<StructureType, StructureType::ePipelineRenderingCreateInfo>
  {
    using Type = PipelineRenderingCreateInfo;
  };

  using PipelineRenderingCreateInfoKHR = PipelineRenderingCreateInfo;

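  // Illustrative usage sketch (editorial comment, not emitted by the registry generator):
  // describing attachment formats for dynamic rendering and chaining the struct into a graphics
  // pipeline create info via pNext. 'swapchainFormat' and 'pipelineCreateInfo' are assumed to
  // exist in application code; the depth format is an arbitrary example.
  //
  //   vk::Format colorFormats[] = { swapchainFormat };
  //   vk::PipelineRenderingCreateInfo renderingInfo( 0, colorFormats, vk::Format::eD32Sfloat );
  //   vk::GraphicsPipelineCreateInfo pipelineCreateInfo;  // renderPass stays VK_NULL_HANDLE for dynamic rendering
  //   pipelineCreateInfo.pNext = &renderingInfo;
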
  struct PipelineSampleLocationsStateCreateInfoEXT
  {
    using NativeType = VkPipelineSampleLocationsStateCreateInfoEXT;

    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineSampleLocationsStateCreateInfoEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PipelineSampleLocationsStateCreateInfoEXT( VULKAN_HPP_NAMESPACE::Bool32                 sampleLocationsEnable_ = {},
                                                                    VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo_   = {},
                                                                    const void *                                 pNext_                 = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , sampleLocationsEnable( sampleLocationsEnable_ )
      , sampleLocationsInfo( sampleLocationsInfo_ )
    {
    }

    VULKAN_HPP_CONSTEXPR PipelineSampleLocationsStateCreateInfoEXT( PipelineSampleLocationsStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineSampleLocationsStateCreateInfoEXT( VkPipelineSampleLocationsStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineSampleLocationsStateCreateInfoEXT( *reinterpret_cast<PipelineSampleLocationsStateCreateInfoEXT const *>( &rhs ) )
    {
    }

    PipelineSampleLocationsStateCreateInfoEXT & operator=( PipelineSampleLocationsStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PipelineSampleLocationsStateCreateInfoEXT & operator=( VkPipelineSampleLocationsStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineSampleLocationsStateCreateInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PipelineSampleLocationsStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineSampleLocationsStateCreateInfoEXT &
      setSampleLocationsEnable( VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      sampleLocationsEnable = sampleLocationsEnable_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineSampleLocationsStateCreateInfoEXT &
      setSampleLocationsInfo( VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const & sampleLocationsInfo_ ) VULKAN_HPP_NOEXCEPT
    {
      sampleLocationsInfo = sampleLocationsInfo_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkPipelineSampleLocationsStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineSampleLocationsStateCreateInfoEXT *>( this );
    }

    operator VkPipelineSampleLocationsStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineSampleLocationsStateCreateInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#  if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#  else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
               const void * const &,
               VULKAN_HPP_NAMESPACE::Bool32 const &,
               VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const &>
#  endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, sampleLocationsEnable, sampleLocationsInfo );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineSampleLocationsStateCreateInfoEXT const & ) const = default;
#else
    bool operator==( PipelineSampleLocationsStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( sampleLocationsEnable == rhs.sampleLocationsEnable ) &&
             ( sampleLocationsInfo == rhs.sampleLocationsInfo );
#  endif
    }

    bool operator!=( PipelineSampleLocationsStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType          sType                 = StructureType::ePipelineSampleLocationsStateCreateInfoEXT;
    const void *                                 pNext                 = {};
    VULKAN_HPP_NAMESPACE::Bool32                 sampleLocationsEnable = {};
    VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo   = {};
  };

  template <>
  struct CppType<StructureType, StructureType::ePipelineSampleLocationsStateCreateInfoEXT>
  {
    using Type = PipelineSampleLocationsStateCreateInfoEXT;
  };

  struct PipelineShaderStageRequiredSubgroupSizeCreateInfo
  {
    using NativeType = VkPipelineShaderStageRequiredSubgroupSizeCreateInfo;

    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineShaderStageRequiredSubgroupSizeCreateInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PipelineShaderStageRequiredSubgroupSizeCreateInfo( uint32_t requiredSubgroupSize_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , requiredSubgroupSize( requiredSubgroupSize_ )
    {
    }

    VULKAN_HPP_CONSTEXPR
      PipelineShaderStageRequiredSubgroupSizeCreateInfo( PipelineShaderStageRequiredSubgroupSizeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineShaderStageRequiredSubgroupSizeCreateInfo( VkPipelineShaderStageRequiredSubgroupSizeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineShaderStageRequiredSubgroupSizeCreateInfo( *reinterpret_cast<PipelineShaderStageRequiredSubgroupSizeCreateInfo const *>( &rhs ) )
    {
    }

    PipelineShaderStageRequiredSubgroupSizeCreateInfo &
      operator=( PipelineShaderStageRequiredSubgroupSizeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PipelineShaderStageRequiredSubgroupSizeCreateInfo & operator=( VkPipelineShaderStageRequiredSubgroupSizeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineShaderStageRequiredSubgroupSizeCreateInfo const *>( &rhs );
      return *this;
    }

    operator VkPipelineShaderStageRequiredSubgroupSizeCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineShaderStageRequiredSubgroupSizeCreateInfo *>( this );
    }

    operator VkPipelineShaderStageRequiredSubgroupSizeCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineShaderStageRequiredSubgroupSizeCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#  if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#  else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &>
#  endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, requiredSubgroupSize );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineShaderStageRequiredSubgroupSizeCreateInfo const & ) const = default;
#else
    bool operator==( PipelineShaderStageRequiredSubgroupSizeCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( requiredSubgroupSize == rhs.requiredSubgroupSize );
#  endif
    }

    bool operator!=( PipelineShaderStageRequiredSubgroupSizeCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType                = StructureType::ePipelineShaderStageRequiredSubgroupSizeCreateInfo;
    void *                              pNext                = {};
    uint32_t                            requiredSubgroupSize = {};
  };

  template <>
  struct CppType<StructureType, StructureType::ePipelineShaderStageRequiredSubgroupSizeCreateInfo>
  {
    using Type = PipelineShaderStageRequiredSubgroupSizeCreateInfo;
  };

  using PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT = PipelineShaderStageRequiredSubgroupSizeCreateInfo;
  using ShaderRequiredSubgroupSizeCreateInfoEXT              = PipelineShaderStageRequiredSubgroupSizeCreateInfo;

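  // Illustrative usage sketch (editorial comment, not emitted by the registry generator):
  // requesting a specific subgroup size for one shader stage by chaining this struct into a
  // PipelineShaderStageCreateInfo. 'stageCreateInfo' is assumed to exist in application code,
  // and 32 is an arbitrary example value that must lie within the device's supported range.
  //
  //   vk::PipelineShaderStageRequiredSubgroupSizeCreateInfo requiredSize( 32 );
  //   vk::PipelineShaderStageCreateInfo stageCreateInfo;  // assumed to be filled elsewhere
  //   stageCreateInfo.pNext = &requiredSize;
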
  struct PipelineTessellationDomainOriginStateCreateInfo
  {
    using NativeType = VkPipelineTessellationDomainOriginStateCreateInfo;

    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineTessellationDomainOriginStateCreateInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PipelineTessellationDomainOriginStateCreateInfo(
      VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin_ = VULKAN_HPP_NAMESPACE::TessellationDomainOrigin::eUpperLeft,
      const void *                                   pNext_        = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , domainOrigin( domainOrigin_ )
    {
    }

    VULKAN_HPP_CONSTEXPR
      PipelineTessellationDomainOriginStateCreateInfo( PipelineTessellationDomainOriginStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineTessellationDomainOriginStateCreateInfo( VkPipelineTessellationDomainOriginStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineTessellationDomainOriginStateCreateInfo( *reinterpret_cast<PipelineTessellationDomainOriginStateCreateInfo const *>( &rhs ) )
    {
    }

    PipelineTessellationDomainOriginStateCreateInfo & operator=( PipelineTessellationDomainOriginStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PipelineTessellationDomainOriginStateCreateInfo & operator=( VkPipelineTessellationDomainOriginStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineTessellationDomainOriginStateCreateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PipelineTessellationDomainOriginStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineTessellationDomainOriginStateCreateInfo &
      setDomainOrigin( VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin_ ) VULKAN_HPP_NOEXCEPT
    {
      domainOrigin = domainOrigin_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkPipelineTessellationDomainOriginStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineTessellationDomainOriginStateCreateInfo *>( this );
    }

    operator VkPipelineTessellationDomainOriginStateCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineTessellationDomainOriginStateCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#  if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#  else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::TessellationDomainOrigin const &>
#  endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, domainOrigin );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineTessellationDomainOriginStateCreateInfo const & ) const = default;
#else
    bool operator==( PipelineTessellationDomainOriginStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( domainOrigin == rhs.domainOrigin );
#  endif
    }

    bool operator!=( PipelineTessellationDomainOriginStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType            sType        = StructureType::ePipelineTessellationDomainOriginStateCreateInfo;
    const void *                                   pNext        = {};
    VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin = VULKAN_HPP_NAMESPACE::TessellationDomainOrigin::eUpperLeft;
  };

  template <>
  struct CppType<StructureType, StructureType::ePipelineTessellationDomainOriginStateCreateInfo>
  {
    using Type = PipelineTessellationDomainOriginStateCreateInfo;
  };

  using PipelineTessellationDomainOriginStateCreateInfoKHR = PipelineTessellationDomainOriginStateCreateInfo;

  struct PresentInfoKHR
  {
    using NativeType = VkPresentInfoKHR;

    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePresentInfoKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PresentInfoKHR( uint32_t                                   waitSemaphoreCount_ = {},
                                         const VULKAN_HPP_NAMESPACE::Semaphore *    pWaitSemaphores_    = {},
                                         uint32_t                                   swapchainCount_     = {},
                                         const VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains_        = {},
                                         const uint32_t *                           pImageIndices_      = {},
                                         VULKAN_HPP_NAMESPACE::Result *             pResults_           = {},
                                         const void *                               pNext_              = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , waitSemaphoreCount( waitSemaphoreCount_ )
      , pWaitSemaphores( pWaitSemaphores_ )
      , swapchainCount( swapchainCount_ )
      , pSwapchains( pSwapchains_ )
      , pImageIndices( pImageIndices_ )
      , pResults( pResults_ )
    {
    }

    VULKAN_HPP_CONSTEXPR PresentInfoKHR( PresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PresentInfoKHR( VkPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT : PresentInfoKHR( *reinterpret_cast<PresentInfoKHR const *>( &rhs ) ) {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    PresentInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const &    waitSemaphores_,
                    VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SwapchainKHR> const & swapchains_   = {},
                    VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const &                           imageIndices_ = {},
                    VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<VULKAN_HPP_NAMESPACE::Result> const &             results_      = {},
                    const void *                                                                                    pNext_        = nullptr )
      : pNext( pNext_ )
      , waitSemaphoreCount( static_cast<uint32_t>( waitSemaphores_.size() ) )
      , pWaitSemaphores( waitSemaphores_.data() )
      , swapchainCount( static_cast<uint32_t>( swapchains_.size() ) )
      , pSwapchains( swapchains_.data() )
      , pImageIndices( imageIndices_.data() )
      , pResults( results_.data() )
    {
#    ifdef VULKAN_HPP_NO_EXCEPTIONS
      VULKAN_HPP_ASSERT( swapchains_.size() == imageIndices_.size() );
      VULKAN_HPP_ASSERT( results_.empty() || ( swapchains_.size() == results_.size() ) );
      VULKAN_HPP_ASSERT( results_.empty() || ( imageIndices_.size() == results_.size() ) );
#    else
      if ( swapchains_.size() != imageIndices_.size() )
      {
        throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::PresentInfoKHR::PresentInfoKHR: swapchains_.size() != imageIndices_.size()" );
      }
      if ( !results_.empty() && ( swapchains_.size() != results_.size() ) )
      {
        throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::PresentInfoKHR::PresentInfoKHR: !results_.empty() && ( swapchains_.size() != results_.size() )" );
      }
      if ( !results_.empty() && ( imageIndices_.size() != results_.size() ) )
      {
        throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::PresentInfoKHR::PresentInfoKHR: !results_.empty() && ( imageIndices_.size() != results_.size() )" );
      }
#    endif /*VULKAN_HPP_NO_EXCEPTIONS*/
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    PresentInfoKHR & operator=( PresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PresentInfoKHR & operator=( VkPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PresentInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PresentInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PresentInfoKHR & setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT
    {
      waitSemaphoreCount = waitSemaphoreCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PresentInfoKHR & setPWaitSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores_ ) VULKAN_HPP_NOEXCEPT
    {
      pWaitSemaphores = pWaitSemaphores_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    PresentInfoKHR &
      setWaitSemaphores( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & waitSemaphores_ ) VULKAN_HPP_NOEXCEPT
    {
      waitSemaphoreCount = static_cast<uint32_t>( waitSemaphores_.size() );
      pWaitSemaphores    = waitSemaphores_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 PresentInfoKHR & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT
    {
      swapchainCount = swapchainCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PresentInfoKHR & setPSwapchains( const VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains_ ) VULKAN_HPP_NOEXCEPT
    {
      pSwapchains = pSwapchains_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    PresentInfoKHR &
      setSwapchains( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SwapchainKHR> const & swapchains_ ) VULKAN_HPP_NOEXCEPT
    {
      swapchainCount = static_cast<uint32_t>( swapchains_.size() );
      pSwapchains    = swapchains_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 PresentInfoKHR & setPImageIndices( const uint32_t * pImageIndices_ ) VULKAN_HPP_NOEXCEPT
    {
      pImageIndices = pImageIndices_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    PresentInfoKHR & setImageIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & imageIndices_ ) VULKAN_HPP_NOEXCEPT
    {
      swapchainCount = static_cast<uint32_t>( imageIndices_.size() );
      pImageIndices  = imageIndices_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 PresentInfoKHR & setPResults( VULKAN_HPP_NAMESPACE::Result * pResults_ ) VULKAN_HPP_NOEXCEPT
    {
      pResults = pResults_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    PresentInfoKHR & setResults( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<VULKAN_HPP_NAMESPACE::Result> const & results_ ) VULKAN_HPP_NOEXCEPT
    {
      swapchainCount = static_cast<uint32_t>( results_.size() );
      pResults       = results_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkPresentInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPresentInfoKHR *>( this );
    }

    operator VkPresentInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPresentInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#  if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#  else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
               const void * const &,
               uint32_t const &,
               const VULKAN_HPP_NAMESPACE::Semaphore * const &,
               uint32_t const &,
               const VULKAN_HPP_NAMESPACE::SwapchainKHR * const &,
               const uint32_t * const &,
               VULKAN_HPP_NAMESPACE::Result * const &>
#  endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, waitSemaphoreCount, pWaitSemaphores, swapchainCount, pSwapchains, pImageIndices, pResults );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PresentInfoKHR const & ) const = default;
#else
    bool operator==( PresentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( waitSemaphoreCount == rhs.waitSemaphoreCount ) &&
             ( pWaitSemaphores == rhs.pWaitSemaphores ) && ( swapchainCount == rhs.swapchainCount ) && ( pSwapchains == rhs.pSwapchains ) &&
             ( pImageIndices == rhs.pImageIndices ) && ( pResults == rhs.pResults );
#  endif
    }

    bool operator!=( PresentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType        sType              = StructureType::ePresentInfoKHR;
    const void *                               pNext              = {};
    uint32_t                                   waitSemaphoreCount = {};
    const VULKAN_HPP_NAMESPACE::Semaphore *    pWaitSemaphores    = {};
    uint32_t                                   swapchainCount     = {};
    const VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains        = {};
    const uint32_t *                           pImageIndices      = {};
    VULKAN_HPP_NAMESPACE::Result *             pResults           = {};
  };

  template <>
  struct CppType<StructureType, StructureType::ePresentInfoKHR>
|
|
{
|
|
using Type = PresentInfoKHR;
|
|
};
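
  // Illustrative usage sketch for PresentInfoKHR (assumes the default "vk" namespace alias and
  // enhanced mode; all handle names below are placeholders obtained elsewhere). It shows why the
  // array-taking constructor above checks swapchains_.size() == imageIndices_.size(): each presented
  // swapchain needs exactly one image index, and an optional pResults array of the same size
  // receives one vk::Result per swapchain.
  //
  //   vk::Result presentOne( vk::Queue presentQueue, vk::SwapchainKHR swapchain,
  //                          vk::Semaphore renderFinished, uint32_t imageIndex )
  //   {
  //     // one wait semaphore, one swapchain, one image index -> sizes match
  //     vk::PresentInfoKHR presentInfo( renderFinished, swapchain, imageIndex );
  //     return presentQueue.presentKHR( presentInfo );   // success codes (e.g. eSuboptimalKHR) are returned
  //   }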
|
|
|
|
struct RectLayerKHR
|
|
{
|
|
using NativeType = VkRectLayerKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR
|
|
RectLayerKHR( VULKAN_HPP_NAMESPACE::Offset2D offset_ = {}, VULKAN_HPP_NAMESPACE::Extent2D extent_ = {}, uint32_t layer_ = {} ) VULKAN_HPP_NOEXCEPT
|
|
: offset( offset_ )
|
|
, extent( extent_ )
|
|
, layer( layer_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR RectLayerKHR( RectLayerKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
RectLayerKHR( VkRectLayerKHR const & rhs ) VULKAN_HPP_NOEXCEPT : RectLayerKHR( *reinterpret_cast<RectLayerKHR const *>( &rhs ) ) {}
|
|
|
|
explicit RectLayerKHR( Rect2D const & rect2D, uint32_t layer_ = {} ) : offset( rect2D.offset ), extent( rect2D.extent ), layer( layer_ ) {}
|
|
|
|
RectLayerKHR & operator=( RectLayerKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
RectLayerKHR & operator=( VkRectLayerKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RectLayerKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 RectLayerKHR & setOffset( VULKAN_HPP_NAMESPACE::Offset2D const & offset_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
offset = offset_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RectLayerKHR & setExtent( VULKAN_HPP_NAMESPACE::Extent2D const & extent_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
extent = extent_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RectLayerKHR & setLayer( uint32_t layer_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
layer = layer_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkRectLayerKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkRectLayerKHR *>( this );
|
|
}
|
|
|
|
operator VkRectLayerKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkRectLayerKHR *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::Offset2D const &, VULKAN_HPP_NAMESPACE::Extent2D const &, uint32_t const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( offset, extent, layer );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( RectLayerKHR const & ) const = default;
|
|
#else
|
|
bool operator==( RectLayerKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( offset == rhs.offset ) && ( extent == rhs.extent ) && ( layer == rhs.layer );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( RectLayerKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::Offset2D offset = {};
|
|
VULKAN_HPP_NAMESPACE::Extent2D extent = {};
|
|
uint32_t layer = {};
|
|
};
|
|
|
|
struct PresentRegionKHR
|
|
{
|
|
using NativeType = VkPresentRegionKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PresentRegionKHR( uint32_t rectangleCount_ = {}, const VULKAN_HPP_NAMESPACE::RectLayerKHR * pRectangles_ = {} ) VULKAN_HPP_NOEXCEPT
|
|
: rectangleCount( rectangleCount_ )
|
|
, pRectangles( pRectangles_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR PresentRegionKHR( PresentRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PresentRegionKHR( VkPresentRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT : PresentRegionKHR( *reinterpret_cast<PresentRegionKHR const *>( &rhs ) ) {}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
PresentRegionKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::RectLayerKHR> const & rectangles_ )
|
|
: rectangleCount( static_cast<uint32_t>( rectangles_.size() ) ), pRectangles( rectangles_.data() )
|
|
{
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
PresentRegionKHR & operator=( PresentRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PresentRegionKHR & operator=( VkPresentRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PresentRegionKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PresentRegionKHR & setRectangleCount( uint32_t rectangleCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
rectangleCount = rectangleCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PresentRegionKHR & setPRectangles( const VULKAN_HPP_NAMESPACE::RectLayerKHR * pRectangles_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pRectangles = pRectangles_;
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
PresentRegionKHR &
|
|
setRectangles( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::RectLayerKHR> const & rectangles_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
rectangleCount = static_cast<uint32_t>( rectangles_.size() );
|
|
pRectangles = rectangles_.data();
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkPresentRegionKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPresentRegionKHR *>( this );
|
|
}
|
|
|
|
operator VkPresentRegionKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPresentRegionKHR *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<uint32_t const &, const VULKAN_HPP_NAMESPACE::RectLayerKHR * const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( rectangleCount, pRectangles );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( PresentRegionKHR const & ) const = default;
|
|
#else
|
|
bool operator==( PresentRegionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( rectangleCount == rhs.rectangleCount ) && ( pRectangles == rhs.pRectangles );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( PresentRegionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
uint32_t rectangleCount = {};
|
|
const VULKAN_HPP_NAMESPACE::RectLayerKHR * pRectangles = {};
|
|
};
|
|
|
|
struct PresentRegionsKHR
|
|
{
|
|
using NativeType = VkPresentRegionsKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePresentRegionsKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PresentRegionsKHR( uint32_t swapchainCount_ = {},
|
|
const VULKAN_HPP_NAMESPACE::PresentRegionKHR * pRegions_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, swapchainCount( swapchainCount_ )
|
|
, pRegions( pRegions_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR PresentRegionsKHR( PresentRegionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PresentRegionsKHR( VkPresentRegionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT : PresentRegionsKHR( *reinterpret_cast<PresentRegionsKHR const *>( &rhs ) ) {}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
PresentRegionsKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PresentRegionKHR> const & regions_,
|
|
const void * pNext_ = nullptr )
|
|
: pNext( pNext_ ), swapchainCount( static_cast<uint32_t>( regions_.size() ) ), pRegions( regions_.data() )
|
|
{
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
PresentRegionsKHR & operator=( PresentRegionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PresentRegionsKHR & operator=( VkPresentRegionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PresentRegionsKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PresentRegionsKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PresentRegionsKHR & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
swapchainCount = swapchainCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PresentRegionsKHR & setPRegions( const VULKAN_HPP_NAMESPACE::PresentRegionKHR * pRegions_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pRegions = pRegions_;
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
PresentRegionsKHR &
|
|
setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PresentRegionKHR> const & regions_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
swapchainCount = static_cast<uint32_t>( regions_.size() );
|
|
pRegions = regions_.data();
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkPresentRegionsKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPresentRegionsKHR *>( this );
|
|
}
|
|
|
|
operator VkPresentRegionsKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPresentRegionsKHR *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::PresentRegionKHR * const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, swapchainCount, pRegions );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( PresentRegionsKHR const & ) const = default;
|
|
#else
|
|
bool operator==( PresentRegionsKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchainCount == rhs.swapchainCount ) && ( pRegions == rhs.pRegions );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( PresentRegionsKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePresentRegionsKHR;
|
|
const void * pNext = {};
|
|
uint32_t swapchainCount = {};
|
|
const VULKAN_HPP_NAMESPACE::PresentRegionKHR * pRegions = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePresentRegionsKHR>
|
|
{
|
|
using Type = PresentRegionsKHR;
|
|
};
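
  // Illustrative usage sketch for VK_KHR_incremental_present (assumes the default "vk" namespace and
  // enhanced mode): RectLayerKHR describes one dirty rectangle (the Rect2D convenience constructor is
  // declared above), PresentRegionKHR groups the rectangles for one swapchain, and PresentRegionsKHR,
  // whose swapchainCount must match PresentInfoKHR::swapchainCount, is chained into pNext.
  //
  //   vk::Result presentDirtyRect( vk::Queue queue, vk::SwapchainKHR swapchain, vk::Semaphore waitSemaphore,
  //                                uint32_t imageIndex, vk::Rect2D dirtyArea )
  //   {
  //     vk::RectLayerKHR      rect( dirtyArea, 0 );        // rectangle + layer 0
  //     vk::PresentRegionKHR  region( rect );              // rectangleCount = 1
  //     vk::PresentRegionsKHR regions( region );           // swapchainCount = 1, matches presentInfo below
  //     vk::PresentInfoKHR    presentInfo( waitSemaphore, swapchain, imageIndex );
  //     presentInfo.setPNext( &regions );
  //     return queue.presentKHR( presentInfo );            // chained structs stay alive for the call
  //   }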
|
|
|
|
struct PrivateDataSlotCreateInfo
|
|
{
|
|
using NativeType = VkPrivateDataSlotCreateInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePrivateDataSlotCreateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PrivateDataSlotCreateInfo( VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateFlags flags_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, flags( flags_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR PrivateDataSlotCreateInfo( PrivateDataSlotCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PrivateDataSlotCreateInfo( VkPrivateDataSlotCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PrivateDataSlotCreateInfo( *reinterpret_cast<PrivateDataSlotCreateInfo const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
PrivateDataSlotCreateInfo & operator=( PrivateDataSlotCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PrivateDataSlotCreateInfo & operator=( VkPrivateDataSlotCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PrivateDataSlotCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PrivateDataSlotCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkPrivateDataSlotCreateInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( this );
|
|
}
|
|
|
|
operator VkPrivateDataSlotCreateInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPrivateDataSlotCreateInfo *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateFlags const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( PrivateDataSlotCreateInfo const & ) const = default;
|
|
#else
|
|
bool operator==( PrivateDataSlotCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( PrivateDataSlotCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePrivateDataSlotCreateInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateFlags flags = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePrivateDataSlotCreateInfo>
|
|
{
|
|
using Type = PrivateDataSlotCreateInfo;
|
|
};
|
|
|
|
using PrivateDataSlotCreateInfoEXT = PrivateDataSlotCreateInfo;
|
|
|
|
struct ProtectedSubmitInfo
|
|
{
|
|
using NativeType = VkProtectedSubmitInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eProtectedSubmitInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ProtectedSubmitInfo( VULKAN_HPP_NAMESPACE::Bool32 protectedSubmit_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, protectedSubmit( protectedSubmit_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR ProtectedSubmitInfo( ProtectedSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ProtectedSubmitInfo( VkProtectedSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT : ProtectedSubmitInfo( *reinterpret_cast<ProtectedSubmitInfo const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
ProtectedSubmitInfo & operator=( ProtectedSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ProtectedSubmitInfo & operator=( VkProtectedSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ProtectedSubmitInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ProtectedSubmitInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ProtectedSubmitInfo & setProtectedSubmit( VULKAN_HPP_NAMESPACE::Bool32 protectedSubmit_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
protectedSubmit = protectedSubmit_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkProtectedSubmitInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkProtectedSubmitInfo *>( this );
|
|
}
|
|
|
|
operator VkProtectedSubmitInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkProtectedSubmitInfo *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, protectedSubmit );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( ProtectedSubmitInfo const & ) const = default;
|
|
#else
|
|
bool operator==( ProtectedSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( protectedSubmit == rhs.protectedSubmit );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( ProtectedSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eProtectedSubmitInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 protectedSubmit = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eProtectedSubmitInfo>
|
|
{
|
|
using Type = ProtectedSubmitInfo;
|
|
};
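
  // Illustrative usage sketch (assumes the default "vk" namespace and enhanced mode): ProtectedSubmitInfo
  // is chained into SubmitInfo::pNext to mark a submission as protected; the queue is assumed to have been
  // created with vk::DeviceQueueCreateFlagBits::eProtected.
  //
  //   void submitProtected( vk::Queue queue, vk::CommandBuffer commandBuffer, vk::Fence fence )
  //   {
  //     vk::ProtectedSubmitInfo protectedInfo( VK_TRUE );
  //     vk::SubmitInfo          submitInfo;
  //     submitInfo.setCommandBuffers( commandBuffer );
  //     submitInfo.setPNext( &protectedInfo );
  //     queue.submit( submitInfo, fence );
  //   }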
|
|
|
|
struct QueryPoolCreateInfo
|
|
{
|
|
using NativeType = VkQueryPoolCreateInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueryPoolCreateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR QueryPoolCreateInfo( VULKAN_HPP_NAMESPACE::QueryPoolCreateFlags flags_ = {},
|
|
VULKAN_HPP_NAMESPACE::QueryType queryType_ = VULKAN_HPP_NAMESPACE::QueryType::eOcclusion,
|
|
uint32_t queryCount_ = {},
|
|
VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, flags( flags_ )
|
|
, queryType( queryType_ )
|
|
, queryCount( queryCount_ )
|
|
, pipelineStatistics( pipelineStatistics_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR QueryPoolCreateInfo( QueryPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
QueryPoolCreateInfo( VkQueryPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : QueryPoolCreateInfo( *reinterpret_cast<QueryPoolCreateInfo const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
QueryPoolCreateInfo & operator=( QueryPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
QueryPoolCreateInfo & operator=( VkQueryPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 QueryPoolCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 QueryPoolCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::QueryPoolCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 QueryPoolCreateInfo & setQueryType( VULKAN_HPP_NAMESPACE::QueryType queryType_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
queryType = queryType_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 QueryPoolCreateInfo & setQueryCount( uint32_t queryCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
queryCount = queryCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 QueryPoolCreateInfo &
|
|
setPipelineStatistics( VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pipelineStatistics = pipelineStatistics_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkQueryPoolCreateInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkQueryPoolCreateInfo *>( this );
|
|
}
|
|
|
|
operator VkQueryPoolCreateInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkQueryPoolCreateInfo *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
const void * const &,
|
|
VULKAN_HPP_NAMESPACE::QueryPoolCreateFlags const &,
|
|
VULKAN_HPP_NAMESPACE::QueryType const &,
|
|
uint32_t const &,
|
|
VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags, queryType, queryCount, pipelineStatistics );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( QueryPoolCreateInfo const & ) const = default;
|
|
#else
|
|
bool operator==( QueryPoolCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( queryType == rhs.queryType ) && ( queryCount == rhs.queryCount ) &&
|
|
( pipelineStatistics == rhs.pipelineStatistics );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( QueryPoolCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueryPoolCreateInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::QueryPoolCreateFlags flags = {};
|
|
VULKAN_HPP_NAMESPACE::QueryType queryType = VULKAN_HPP_NAMESPACE::QueryType::eOcclusion;
|
|
uint32_t queryCount = {};
|
|
VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eQueryPoolCreateInfo>
|
|
{
|
|
using Type = QueryPoolCreateInfo;
|
|
};
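
  // Illustrative usage sketch (assumes the default "vk" namespace and exception-based enhanced mode,
  // where createQueryPool returns the created handle directly).
  //
  //   vk::QueryPool createTimestampPool( vk::Device device, uint32_t timestampCount )
  //   {
  //     vk::QueryPoolCreateInfo createInfo( {}, vk::QueryType::eTimestamp, timestampCount );
  //     return device.createQueryPool( createInfo );
  //   }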
|
|
|
|
struct QueryPoolPerformanceCreateInfoKHR
|
|
{
|
|
using NativeType = VkQueryPoolPerformanceCreateInfoKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueryPoolPerformanceCreateInfoKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR QueryPoolPerformanceCreateInfoKHR( uint32_t queueFamilyIndex_ = {},
|
|
uint32_t counterIndexCount_ = {},
|
|
const uint32_t * pCounterIndices_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, queueFamilyIndex( queueFamilyIndex_ )
|
|
, counterIndexCount( counterIndexCount_ )
|
|
, pCounterIndices( pCounterIndices_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR QueryPoolPerformanceCreateInfoKHR( QueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
QueryPoolPerformanceCreateInfoKHR( VkQueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: QueryPoolPerformanceCreateInfoKHR( *reinterpret_cast<QueryPoolPerformanceCreateInfoKHR const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
QueryPoolPerformanceCreateInfoKHR( uint32_t queueFamilyIndex_,
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & counterIndices_,
|
|
const void * pNext_ = nullptr )
|
|
: pNext( pNext_ )
|
|
, queueFamilyIndex( queueFamilyIndex_ )
|
|
, counterIndexCount( static_cast<uint32_t>( counterIndices_.size() ) )
|
|
, pCounterIndices( counterIndices_.data() )
|
|
{
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
QueryPoolPerformanceCreateInfoKHR & operator=( QueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
QueryPoolPerformanceCreateInfoKHR & operator=( VkQueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 QueryPoolPerformanceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 QueryPoolPerformanceCreateInfoKHR & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
queueFamilyIndex = queueFamilyIndex_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 QueryPoolPerformanceCreateInfoKHR & setCounterIndexCount( uint32_t counterIndexCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
counterIndexCount = counterIndexCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 QueryPoolPerformanceCreateInfoKHR & setPCounterIndices( const uint32_t * pCounterIndices_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pCounterIndices = pCounterIndices_;
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
QueryPoolPerformanceCreateInfoKHR &
|
|
setCounterIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & counterIndices_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
counterIndexCount = static_cast<uint32_t>( counterIndices_.size() );
|
|
pCounterIndices = counterIndices_.data();
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkQueryPoolPerformanceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkQueryPoolPerformanceCreateInfoKHR *>( this );
|
|
}
|
|
|
|
operator VkQueryPoolPerformanceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkQueryPoolPerformanceCreateInfoKHR *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, uint32_t const &, const uint32_t * const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, queueFamilyIndex, counterIndexCount, pCounterIndices );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( QueryPoolPerformanceCreateInfoKHR const & ) const = default;
|
|
#else
|
|
bool operator==( QueryPoolPerformanceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( queueFamilyIndex == rhs.queueFamilyIndex ) &&
|
|
( counterIndexCount == rhs.counterIndexCount ) && ( pCounterIndices == rhs.pCounterIndices );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( QueryPoolPerformanceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueryPoolPerformanceCreateInfoKHR;
|
|
const void * pNext = {};
|
|
uint32_t queueFamilyIndex = {};
|
|
uint32_t counterIndexCount = {};
|
|
const uint32_t * pCounterIndices = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eQueryPoolPerformanceCreateInfoKHR>
|
|
{
|
|
using Type = QueryPoolPerformanceCreateInfoKHR;
|
|
};
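
  // Illustrative usage sketch for VK_KHR_performance_query (assumes the default "vk" namespace and
  // enhanced mode): this structure is chained into QueryPoolCreateInfo::pNext when the query type is
  // ePerformanceQueryKHR, with counter indices previously enumerated for the given queue family.
  //
  //   vk::QueryPool createPerformancePool( vk::Device device, uint32_t queueFamilyIndex,
  //                                        std::vector<uint32_t> const & counterIndices )
  //   {
  //     vk::QueryPoolPerformanceCreateInfoKHR performanceInfo( queueFamilyIndex, counterIndices );
  //     vk::QueryPoolCreateInfo               createInfo( {}, vk::QueryType::ePerformanceQueryKHR, 1 );
  //     createInfo.setPNext( &performanceInfo );
  //     return device.createQueryPool( createInfo );
  //   }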
|
|
|
|
struct QueueFamilyCheckpointProperties2NV
|
|
{
|
|
using NativeType = VkQueueFamilyCheckpointProperties2NV;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueueFamilyCheckpointProperties2NV;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR QueueFamilyCheckpointProperties2NV( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 checkpointExecutionStageMask_ = {},
|
|
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, checkpointExecutionStageMask( checkpointExecutionStageMask_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR QueueFamilyCheckpointProperties2NV( QueueFamilyCheckpointProperties2NV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
QueueFamilyCheckpointProperties2NV( VkQueueFamilyCheckpointProperties2NV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: QueueFamilyCheckpointProperties2NV( *reinterpret_cast<QueueFamilyCheckpointProperties2NV const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
QueueFamilyCheckpointProperties2NV & operator=( QueueFamilyCheckpointProperties2NV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
QueueFamilyCheckpointProperties2NV & operator=( VkQueueFamilyCheckpointProperties2NV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointProperties2NV const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
operator VkQueueFamilyCheckpointProperties2NV const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkQueueFamilyCheckpointProperties2NV *>( this );
|
|
}
|
|
|
|
operator VkQueueFamilyCheckpointProperties2NV &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkQueueFamilyCheckpointProperties2NV *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::PipelineStageFlags2 const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, checkpointExecutionStageMask );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( QueueFamilyCheckpointProperties2NV const & ) const = default;
|
|
#else
|
|
bool operator==( QueueFamilyCheckpointProperties2NV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( checkpointExecutionStageMask == rhs.checkpointExecutionStageMask );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( QueueFamilyCheckpointProperties2NV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueueFamilyCheckpointProperties2NV;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::PipelineStageFlags2 checkpointExecutionStageMask = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eQueueFamilyCheckpointProperties2NV>
|
|
{
|
|
using Type = QueueFamilyCheckpointProperties2NV;
|
|
};
|
|
|
|
struct QueueFamilyProperties
|
|
{
|
|
using NativeType = VkQueueFamilyProperties;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR QueueFamilyProperties( VULKAN_HPP_NAMESPACE::QueueFlags queueFlags_ = {},
|
|
uint32_t queueCount_ = {},
|
|
uint32_t timestampValidBits_ = {},
|
|
VULKAN_HPP_NAMESPACE::Extent3D minImageTransferGranularity_ = {} ) VULKAN_HPP_NOEXCEPT
|
|
: queueFlags( queueFlags_ )
|
|
, queueCount( queueCount_ )
|
|
, timestampValidBits( timestampValidBits_ )
|
|
, minImageTransferGranularity( minImageTransferGranularity_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR QueueFamilyProperties( QueueFamilyProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
QueueFamilyProperties( VkQueueFamilyProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: QueueFamilyProperties( *reinterpret_cast<QueueFamilyProperties const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
QueueFamilyProperties & operator=( QueueFamilyProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
QueueFamilyProperties & operator=( VkQueueFamilyProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueueFamilyProperties const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
operator VkQueueFamilyProperties const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkQueueFamilyProperties *>( this );
|
|
}
|
|
|
|
operator VkQueueFamilyProperties &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkQueueFamilyProperties *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::QueueFlags const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Extent3D const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( queueFlags, queueCount, timestampValidBits, minImageTransferGranularity );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( QueueFamilyProperties const & ) const = default;
|
|
#else
|
|
bool operator==( QueueFamilyProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( queueFlags == rhs.queueFlags ) && ( queueCount == rhs.queueCount ) && ( timestampValidBits == rhs.timestampValidBits ) &&
|
|
( minImageTransferGranularity == rhs.minImageTransferGranularity );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( QueueFamilyProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::QueueFlags queueFlags = {};
|
|
uint32_t queueCount = {};
|
|
uint32_t timestampValidBits = {};
|
|
VULKAN_HPP_NAMESPACE::Extent3D minImageTransferGranularity = {};
|
|
};
|
|
|
|
struct QueueFamilyProperties2
|
|
{
|
|
using NativeType = VkQueueFamilyProperties2;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueueFamilyProperties2;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR QueueFamilyProperties2( VULKAN_HPP_NAMESPACE::QueueFamilyProperties queueFamilyProperties_ = {},
|
|
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, queueFamilyProperties( queueFamilyProperties_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR QueueFamilyProperties2( QueueFamilyProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
QueueFamilyProperties2( VkQueueFamilyProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: QueueFamilyProperties2( *reinterpret_cast<QueueFamilyProperties2 const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
QueueFamilyProperties2 & operator=( QueueFamilyProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
QueueFamilyProperties2 & operator=( VkQueueFamilyProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
operator VkQueueFamilyProperties2 const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkQueueFamilyProperties2 *>( this );
|
|
}
|
|
|
|
operator VkQueueFamilyProperties2 &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkQueueFamilyProperties2 *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::QueueFamilyProperties const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, queueFamilyProperties );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( QueueFamilyProperties2 const & ) const = default;
|
|
#else
|
|
bool operator==( QueueFamilyProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( queueFamilyProperties == rhs.queueFamilyProperties );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( QueueFamilyProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueueFamilyProperties2;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::QueueFamilyProperties queueFamilyProperties = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eQueueFamilyProperties2>
|
|
{
|
|
using Type = QueueFamilyProperties2;
|
|
};
|
|
|
|
using QueueFamilyProperties2KHR = QueueFamilyProperties2;
|
|
|
|
struct RefreshObjectKHR
|
|
{
|
|
using NativeType = VkRefreshObjectKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR RefreshObjectKHR( VULKAN_HPP_NAMESPACE::ObjectType objectType_ = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown,
|
|
uint64_t objectHandle_ = {},
|
|
VULKAN_HPP_NAMESPACE::RefreshObjectFlagsKHR flags_ = {} ) VULKAN_HPP_NOEXCEPT
|
|
: objectType( objectType_ )
|
|
, objectHandle( objectHandle_ )
|
|
, flags( flags_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR RefreshObjectKHR( RefreshObjectKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
RefreshObjectKHR( VkRefreshObjectKHR const & rhs ) VULKAN_HPP_NOEXCEPT : RefreshObjectKHR( *reinterpret_cast<RefreshObjectKHR const *>( &rhs ) ) {}
|
|
|
|
RefreshObjectKHR & operator=( RefreshObjectKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
RefreshObjectKHR & operator=( VkRefreshObjectKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RefreshObjectKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 RefreshObjectKHR & setObjectType( VULKAN_HPP_NAMESPACE::ObjectType objectType_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
objectType = objectType_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RefreshObjectKHR & setObjectHandle( uint64_t objectHandle_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
objectHandle = objectHandle_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RefreshObjectKHR & setFlags( VULKAN_HPP_NAMESPACE::RefreshObjectFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkRefreshObjectKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkRefreshObjectKHR *>( this );
|
|
}
|
|
|
|
operator VkRefreshObjectKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkRefreshObjectKHR *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::ObjectType const &, uint64_t const &, VULKAN_HPP_NAMESPACE::RefreshObjectFlagsKHR const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( objectType, objectHandle, flags );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( RefreshObjectKHR const & ) const = default;
|
|
#else
|
|
bool operator==( RefreshObjectKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( objectType == rhs.objectType ) && ( objectHandle == rhs.objectHandle ) && ( flags == rhs.flags );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( RefreshObjectKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown;
|
|
uint64_t objectHandle = {};
|
|
VULKAN_HPP_NAMESPACE::RefreshObjectFlagsKHR flags = {};
|
|
};
|
|
|
|
struct RefreshObjectListKHR
|
|
{
|
|
using NativeType = VkRefreshObjectListKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRefreshObjectListKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR RefreshObjectListKHR( uint32_t objectCount_ = {},
|
|
const VULKAN_HPP_NAMESPACE::RefreshObjectKHR * pObjects_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, objectCount( objectCount_ )
|
|
, pObjects( pObjects_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR RefreshObjectListKHR( RefreshObjectListKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
RefreshObjectListKHR( VkRefreshObjectListKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: RefreshObjectListKHR( *reinterpret_cast<RefreshObjectListKHR const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
RefreshObjectListKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::RefreshObjectKHR> const & objects_,
|
|
const void * pNext_ = nullptr )
|
|
: pNext( pNext_ ), objectCount( static_cast<uint32_t>( objects_.size() ) ), pObjects( objects_.data() )
|
|
{
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
RefreshObjectListKHR & operator=( RefreshObjectListKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
RefreshObjectListKHR & operator=( VkRefreshObjectListKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RefreshObjectListKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 RefreshObjectListKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RefreshObjectListKHR & setObjectCount( uint32_t objectCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
objectCount = objectCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RefreshObjectListKHR & setPObjects( const VULKAN_HPP_NAMESPACE::RefreshObjectKHR * pObjects_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pObjects = pObjects_;
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
RefreshObjectListKHR &
|
|
setObjects( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::RefreshObjectKHR> const & objects_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
objectCount = static_cast<uint32_t>( objects_.size() );
|
|
pObjects = objects_.data();
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkRefreshObjectListKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkRefreshObjectListKHR *>( this );
|
|
}
|
|
|
|
operator VkRefreshObjectListKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkRefreshObjectListKHR *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::RefreshObjectKHR * const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, objectCount, pObjects );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( RefreshObjectListKHR const & ) const = default;
|
|
#else
|
|
bool operator==( RefreshObjectListKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( objectCount == rhs.objectCount ) && ( pObjects == rhs.pObjects );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( RefreshObjectListKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRefreshObjectListKHR;
|
|
const void * pNext = {};
|
|
uint32_t objectCount = {};
|
|
const VULKAN_HPP_NAMESPACE::RefreshObjectKHR * pObjects = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eRefreshObjectListKHR>
|
|
{
|
|
using Type = RefreshObjectListKHR;
|
|
};
|
|
|
|
struct RenderPassAttachmentBeginInfo
|
|
{
|
|
using NativeType = VkRenderPassAttachmentBeginInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassAttachmentBeginInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR RenderPassAttachmentBeginInfo( uint32_t attachmentCount_ = {},
|
|
const VULKAN_HPP_NAMESPACE::ImageView * pAttachments_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, attachmentCount( attachmentCount_ )
|
|
, pAttachments( pAttachments_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR RenderPassAttachmentBeginInfo( RenderPassAttachmentBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
RenderPassAttachmentBeginInfo( VkRenderPassAttachmentBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: RenderPassAttachmentBeginInfo( *reinterpret_cast<RenderPassAttachmentBeginInfo const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
RenderPassAttachmentBeginInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageView> const & attachments_,
|
|
const void * pNext_ = nullptr )
|
|
: pNext( pNext_ ), attachmentCount( static_cast<uint32_t>( attachments_.size() ) ), pAttachments( attachments_.data() )
|
|
{
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
RenderPassAttachmentBeginInfo & operator=( RenderPassAttachmentBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
RenderPassAttachmentBeginInfo & operator=( VkRenderPassAttachmentBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassAttachmentBeginInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 RenderPassAttachmentBeginInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderPassAttachmentBeginInfo & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
attachmentCount = attachmentCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderPassAttachmentBeginInfo & setPAttachments( const VULKAN_HPP_NAMESPACE::ImageView * pAttachments_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pAttachments = pAttachments_;
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
RenderPassAttachmentBeginInfo &
|
|
setAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageView> const & attachments_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
attachmentCount = static_cast<uint32_t>( attachments_.size() );
|
|
pAttachments = attachments_.data();
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkRenderPassAttachmentBeginInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkRenderPassAttachmentBeginInfo *>( this );
|
|
}
|
|
|
|
operator VkRenderPassAttachmentBeginInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkRenderPassAttachmentBeginInfo *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::ImageView * const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, attachmentCount, pAttachments );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( RenderPassAttachmentBeginInfo const & ) const = default;
|
|
#else
|
|
bool operator==( RenderPassAttachmentBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( attachmentCount == rhs.attachmentCount ) && ( pAttachments == rhs.pAttachments );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( RenderPassAttachmentBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassAttachmentBeginInfo;
|
|
const void * pNext = {};
|
|
uint32_t attachmentCount = {};
|
|
const VULKAN_HPP_NAMESPACE::ImageView * pAttachments = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eRenderPassAttachmentBeginInfo>
|
|
{
|
|
using Type = RenderPassAttachmentBeginInfo;
|
|
};
|
|
|
|
using RenderPassAttachmentBeginInfoKHR = RenderPassAttachmentBeginInfo;
|
|
|
|
struct RenderPassBeginInfo
|
|
{
|
|
using NativeType = VkRenderPassBeginInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassBeginInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {},
|
|
VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ = {},
|
|
VULKAN_HPP_NAMESPACE::Rect2D renderArea_ = {},
|
|
uint32_t clearValueCount_ = {},
|
|
const VULKAN_HPP_NAMESPACE::ClearValue * pClearValues_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, renderPass( renderPass_ )
|
|
, framebuffer( framebuffer_ )
|
|
, renderArea( renderArea_ )
|
|
, clearValueCount( clearValueCount_ )
|
|
, pClearValues( pClearValues_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo( RenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
RenderPassBeginInfo( VkRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT : RenderPassBeginInfo( *reinterpret_cast<RenderPassBeginInfo const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
RenderPassBeginInfo( VULKAN_HPP_NAMESPACE::RenderPass renderPass_,
|
|
VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_,
|
|
VULKAN_HPP_NAMESPACE::Rect2D renderArea_,
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ClearValue> const & clearValues_,
|
|
const void * pNext_ = nullptr )
|
|
: pNext( pNext_ )
|
|
, renderPass( renderPass_ )
|
|
, framebuffer( framebuffer_ )
|
|
, renderArea( renderArea_ )
|
|
, clearValueCount( static_cast<uint32_t>( clearValues_.size() ) )
|
|
, pClearValues( clearValues_.data() )
|
|
{
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
RenderPassBeginInfo & operator=( RenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
RenderPassBeginInfo & operator=( VkRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassBeginInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo & setRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
renderPass = renderPass_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo & setFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
framebuffer = framebuffer_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo & setRenderArea( VULKAN_HPP_NAMESPACE::Rect2D const & renderArea_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
renderArea = renderArea_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo & setClearValueCount( uint32_t clearValueCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
clearValueCount = clearValueCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo & setPClearValues( const VULKAN_HPP_NAMESPACE::ClearValue * pClearValues_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pClearValues = pClearValues_;
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
RenderPassBeginInfo &
|
|
setClearValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ClearValue> const & clearValues_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
clearValueCount = static_cast<uint32_t>( clearValues_.size() );
|
|
pClearValues = clearValues_.data();
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkRenderPassBeginInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkRenderPassBeginInfo *>( this );
|
|
}
|
|
|
|
operator VkRenderPassBeginInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkRenderPassBeginInfo *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
const void * const &,
|
|
VULKAN_HPP_NAMESPACE::RenderPass const &,
|
|
VULKAN_HPP_NAMESPACE::Framebuffer const &,
|
|
VULKAN_HPP_NAMESPACE::Rect2D const &,
|
|
uint32_t const &,
|
|
const VULKAN_HPP_NAMESPACE::ClearValue * const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, renderPass, framebuffer, renderArea, clearValueCount, pClearValues );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( RenderPassBeginInfo const & ) const = default;
|
|
#else
|
|
bool operator==( RenderPassBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( renderPass == rhs.renderPass ) && ( framebuffer == rhs.framebuffer ) &&
|
|
( renderArea == rhs.renderArea ) && ( clearValueCount == rhs.clearValueCount ) && ( pClearValues == rhs.pClearValues );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( RenderPassBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassBeginInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::RenderPass renderPass = {};
|
|
VULKAN_HPP_NAMESPACE::Framebuffer framebuffer = {};
|
|
VULKAN_HPP_NAMESPACE::Rect2D renderArea = {};
|
|
uint32_t clearValueCount = {};
|
|
const VULKAN_HPP_NAMESPACE::ClearValue * pClearValues = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eRenderPassBeginInfo>
|
|
{
|
|
using Type = RenderPassBeginInfo;
|
|
};
|
|
|
|
struct SubpassDescription
|
|
{
|
|
using NativeType = VkSubpassDescription;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR SubpassDescription( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ = {},
|
|
VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics,
|
|
uint32_t inputAttachmentCount_ = {},
|
|
const VULKAN_HPP_NAMESPACE::AttachmentReference * pInputAttachments_ = {},
|
|
uint32_t colorAttachmentCount_ = {},
|
|
const VULKAN_HPP_NAMESPACE::AttachmentReference * pColorAttachments_ = {},
|
|
const VULKAN_HPP_NAMESPACE::AttachmentReference * pResolveAttachments_ = {},
|
|
const VULKAN_HPP_NAMESPACE::AttachmentReference * pDepthStencilAttachment_ = {},
|
|
uint32_t preserveAttachmentCount_ = {},
|
|
const uint32_t * pPreserveAttachments_ = {} ) VULKAN_HPP_NOEXCEPT
|
|
: flags( flags_ )
|
|
, pipelineBindPoint( pipelineBindPoint_ )
|
|
, inputAttachmentCount( inputAttachmentCount_ )
|
|
, pInputAttachments( pInputAttachments_ )
|
|
, colorAttachmentCount( colorAttachmentCount_ )
|
|
, pColorAttachments( pColorAttachments_ )
|
|
, pResolveAttachments( pResolveAttachments_ )
|
|
, pDepthStencilAttachment( pDepthStencilAttachment_ )
|
|
, preserveAttachmentCount( preserveAttachmentCount_ )
|
|
, pPreserveAttachments( pPreserveAttachments_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR SubpassDescription( SubpassDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SubpassDescription( VkSubpassDescription const & rhs ) VULKAN_HPP_NOEXCEPT : SubpassDescription( *reinterpret_cast<SubpassDescription const *>( &rhs ) ) {}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
SubpassDescription( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_,
|
|
VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_,
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference> const & inputAttachments_,
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference> const & colorAttachments_ = {},
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference> const & resolveAttachments_ = {},
|
|
const VULKAN_HPP_NAMESPACE::AttachmentReference * pDepthStencilAttachment_ = {},
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & preserveAttachments_ = {} )
|
|
: flags( flags_ )
|
|
, pipelineBindPoint( pipelineBindPoint_ )
|
|
, inputAttachmentCount( static_cast<uint32_t>( inputAttachments_.size() ) )
|
|
, pInputAttachments( inputAttachments_.data() )
|
|
, colorAttachmentCount( static_cast<uint32_t>( colorAttachments_.size() ) )
|
|
, pColorAttachments( colorAttachments_.data() )
|
|
, pResolveAttachments( resolveAttachments_.data() )
|
|
, pDepthStencilAttachment( pDepthStencilAttachment_ )
|
|
, preserveAttachmentCount( static_cast<uint32_t>( preserveAttachments_.size() ) )
|
|
, pPreserveAttachments( preserveAttachments_.data() )
|
|
{
|
|
# ifdef VULKAN_HPP_NO_EXCEPTIONS
|
|
VULKAN_HPP_ASSERT( resolveAttachments_.empty() || ( colorAttachments_.size() == resolveAttachments_.size() ) );
|
|
# else
|
|
if ( !resolveAttachments_.empty() && ( colorAttachments_.size() != resolveAttachments_.size() ) )
|
|
{
|
|
throw LogicError(
|
|
VULKAN_HPP_NAMESPACE_STRING
|
|
"::SubpassDescription::SubpassDescription: !resolveAttachments_.empty() && ( colorAttachments_.size() != resolveAttachments_.size() )" );
|
|
}
|
|
# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
SubpassDescription & operator=( SubpassDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
SubpassDescription & operator=( VkSubpassDescription const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassDescription const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 SubpassDescription & setFlags( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubpassDescription & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pipelineBindPoint = pipelineBindPoint_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubpassDescription & setInputAttachmentCount( uint32_t inputAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
inputAttachmentCount = inputAttachmentCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubpassDescription &
|
|
setPInputAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference * pInputAttachments_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pInputAttachments = pInputAttachments_;
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
SubpassDescription & setInputAttachments(
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference> const & inputAttachments_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
inputAttachmentCount = static_cast<uint32_t>( inputAttachments_.size() );
|
|
pInputAttachments = inputAttachments_.data();
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubpassDescription & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
colorAttachmentCount = colorAttachmentCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubpassDescription &
|
|
setPColorAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference * pColorAttachments_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pColorAttachments = pColorAttachments_;
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
SubpassDescription & setColorAttachments(
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference> const & colorAttachments_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
colorAttachmentCount = static_cast<uint32_t>( colorAttachments_.size() );
|
|
pColorAttachments = colorAttachments_.data();
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubpassDescription &
|
|
setPResolveAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference * pResolveAttachments_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pResolveAttachments = pResolveAttachments_;
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
SubpassDescription & setResolveAttachments(
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference> const & resolveAttachments_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
colorAttachmentCount = static_cast<uint32_t>( resolveAttachments_.size() );
|
|
pResolveAttachments = resolveAttachments_.data();
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubpassDescription &
|
|
setPDepthStencilAttachment( const VULKAN_HPP_NAMESPACE::AttachmentReference * pDepthStencilAttachment_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pDepthStencilAttachment = pDepthStencilAttachment_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubpassDescription & setPreserveAttachmentCount( uint32_t preserveAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
preserveAttachmentCount = preserveAttachmentCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubpassDescription & setPPreserveAttachments( const uint32_t * pPreserveAttachments_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pPreserveAttachments = pPreserveAttachments_;
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
SubpassDescription &
|
|
setPreserveAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & preserveAttachments_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
preserveAttachmentCount = static_cast<uint32_t>( preserveAttachments_.size() );
|
|
pPreserveAttachments = preserveAttachments_.data();
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkSubpassDescription const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkSubpassDescription *>( this );
|
|
}
|
|
|
|
operator VkSubpassDescription &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkSubpassDescription *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags const &,
|
|
VULKAN_HPP_NAMESPACE::PipelineBindPoint const &,
|
|
uint32_t const &,
|
|
const VULKAN_HPP_NAMESPACE::AttachmentReference * const &,
|
|
uint32_t const &,
|
|
const VULKAN_HPP_NAMESPACE::AttachmentReference * const &,
|
|
const VULKAN_HPP_NAMESPACE::AttachmentReference * const &,
|
|
const VULKAN_HPP_NAMESPACE::AttachmentReference * const &,
|
|
uint32_t const &,
|
|
const uint32_t * const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( flags,
|
|
pipelineBindPoint,
|
|
inputAttachmentCount,
|
|
pInputAttachments,
|
|
colorAttachmentCount,
|
|
pColorAttachments,
|
|
pResolveAttachments,
|
|
pDepthStencilAttachment,
|
|
preserveAttachmentCount,
|
|
pPreserveAttachments );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( SubpassDescription const & ) const = default;
|
|
#else
|
|
bool operator==( SubpassDescription const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( flags == rhs.flags ) && ( pipelineBindPoint == rhs.pipelineBindPoint ) && ( inputAttachmentCount == rhs.inputAttachmentCount ) &&
|
|
( pInputAttachments == rhs.pInputAttachments ) && ( colorAttachmentCount == rhs.colorAttachmentCount ) &&
|
|
( pColorAttachments == rhs.pColorAttachments ) && ( pResolveAttachments == rhs.pResolveAttachments ) &&
|
|
( pDepthStencilAttachment == rhs.pDepthStencilAttachment ) && ( preserveAttachmentCount == rhs.preserveAttachmentCount ) &&
|
|
( pPreserveAttachments == rhs.pPreserveAttachments );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( SubpassDescription const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags = {};
|
|
VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics;
|
|
uint32_t inputAttachmentCount = {};
|
|
const VULKAN_HPP_NAMESPACE::AttachmentReference * pInputAttachments = {};
|
|
uint32_t colorAttachmentCount = {};
|
|
const VULKAN_HPP_NAMESPACE::AttachmentReference * pColorAttachments = {};
|
|
const VULKAN_HPP_NAMESPACE::AttachmentReference * pResolveAttachments = {};
|
|
const VULKAN_HPP_NAMESPACE::AttachmentReference * pDepthStencilAttachment = {};
|
|
uint32_t preserveAttachmentCount = {};
|
|
const uint32_t * pPreserveAttachments = {};
|
|
};
|
|
|
|
struct SubpassDependency
|
|
{
|
|
using NativeType = VkSubpassDependency;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR SubpassDependency( uint32_t srcSubpass_ = {},
|
|
uint32_t dstSubpass_ = {},
|
|
VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_ = {},
|
|
VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_ = {},
|
|
VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {},
|
|
VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {},
|
|
VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ = {} ) VULKAN_HPP_NOEXCEPT
|
|
: srcSubpass( srcSubpass_ )
|
|
, dstSubpass( dstSubpass_ )
|
|
, srcStageMask( srcStageMask_ )
|
|
, dstStageMask( dstStageMask_ )
|
|
, srcAccessMask( srcAccessMask_ )
|
|
, dstAccessMask( dstAccessMask_ )
|
|
, dependencyFlags( dependencyFlags_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR SubpassDependency( SubpassDependency const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SubpassDependency( VkSubpassDependency const & rhs ) VULKAN_HPP_NOEXCEPT : SubpassDependency( *reinterpret_cast<SubpassDependency const *>( &rhs ) ) {}
|
|
|
|
SubpassDependency & operator=( SubpassDependency const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
SubpassDependency & operator=( VkSubpassDependency const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassDependency const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 SubpassDependency & setSrcSubpass( uint32_t srcSubpass_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
srcSubpass = srcSubpass_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubpassDependency & setDstSubpass( uint32_t dstSubpass_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dstSubpass = dstSubpass_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubpassDependency & setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
srcStageMask = srcStageMask_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubpassDependency & setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dstStageMask = dstStageMask_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubpassDependency & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
srcAccessMask = srcAccessMask_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubpassDependency & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dstAccessMask = dstAccessMask_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubpassDependency & setDependencyFlags( VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dependencyFlags = dependencyFlags_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkSubpassDependency const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkSubpassDependency *>( this );
|
|
}
|
|
|
|
operator VkSubpassDependency &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkSubpassDependency *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<uint32_t const &,
|
|
uint32_t const &,
|
|
VULKAN_HPP_NAMESPACE::PipelineStageFlags const &,
|
|
VULKAN_HPP_NAMESPACE::PipelineStageFlags const &,
|
|
VULKAN_HPP_NAMESPACE::AccessFlags const &,
|
|
VULKAN_HPP_NAMESPACE::AccessFlags const &,
|
|
VULKAN_HPP_NAMESPACE::DependencyFlags const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( srcSubpass, dstSubpass, srcStageMask, dstStageMask, srcAccessMask, dstAccessMask, dependencyFlags );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( SubpassDependency const & ) const = default;
|
|
#else
|
|
bool operator==( SubpassDependency const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( srcSubpass == rhs.srcSubpass ) && ( dstSubpass == rhs.dstSubpass ) && ( srcStageMask == rhs.srcStageMask ) &&
|
|
( dstStageMask == rhs.dstStageMask ) && ( srcAccessMask == rhs.srcAccessMask ) && ( dstAccessMask == rhs.dstAccessMask ) &&
|
|
( dependencyFlags == rhs.dependencyFlags );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( SubpassDependency const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
uint32_t srcSubpass = {};
|
|
uint32_t dstSubpass = {};
|
|
VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask = {};
|
|
VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask = {};
|
|
VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {};
|
|
VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {};
|
|
VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags = {};
|
|
};
|
|
|
|
struct RenderPassCreateInfo
|
|
{
|
|
using NativeType = VkRenderPassCreateInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassCreateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR RenderPassCreateInfo( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ = {},
|
|
uint32_t attachmentCount_ = {},
|
|
const VULKAN_HPP_NAMESPACE::AttachmentDescription * pAttachments_ = {},
|
|
uint32_t subpassCount_ = {},
|
|
const VULKAN_HPP_NAMESPACE::SubpassDescription * pSubpasses_ = {},
|
|
uint32_t dependencyCount_ = {},
|
|
const VULKAN_HPP_NAMESPACE::SubpassDependency * pDependencies_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, flags( flags_ )
|
|
, attachmentCount( attachmentCount_ )
|
|
, pAttachments( pAttachments_ )
|
|
, subpassCount( subpassCount_ )
|
|
, pSubpasses( pSubpasses_ )
|
|
, dependencyCount( dependencyCount_ )
|
|
, pDependencies( pDependencies_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR RenderPassCreateInfo( RenderPassCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
RenderPassCreateInfo( VkRenderPassCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: RenderPassCreateInfo( *reinterpret_cast<RenderPassCreateInfo const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
RenderPassCreateInfo( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_,
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentDescription> const & attachments_,
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassDescription> const & subpasses_ = {},
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassDependency> const & dependencies_ = {},
|
|
const void * pNext_ = nullptr )
|
|
: pNext( pNext_ )
|
|
, flags( flags_ )
|
|
, attachmentCount( static_cast<uint32_t>( attachments_.size() ) )
|
|
, pAttachments( attachments_.data() )
|
|
, subpassCount( static_cast<uint32_t>( subpasses_.size() ) )
|
|
, pSubpasses( subpasses_.data() )
|
|
, dependencyCount( static_cast<uint32_t>( dependencies_.size() ) )
|
|
, pDependencies( dependencies_.data() )
|
|
{
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
RenderPassCreateInfo & operator=( RenderPassCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
RenderPassCreateInfo & operator=( VkRenderPassCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassCreateInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
attachmentCount = attachmentCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo & setPAttachments( const VULKAN_HPP_NAMESPACE::AttachmentDescription * pAttachments_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pAttachments = pAttachments_;
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
RenderPassCreateInfo & setAttachments(
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentDescription> const & attachments_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
attachmentCount = static_cast<uint32_t>( attachments_.size() );
|
|
pAttachments = attachments_.data();
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo & setSubpassCount( uint32_t subpassCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
subpassCount = subpassCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo & setPSubpasses( const VULKAN_HPP_NAMESPACE::SubpassDescription * pSubpasses_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pSubpasses = pSubpasses_;
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
RenderPassCreateInfo &
|
|
setSubpasses( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassDescription> const & subpasses_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
subpassCount = static_cast<uint32_t>( subpasses_.size() );
|
|
pSubpasses = subpasses_.data();
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo & setDependencyCount( uint32_t dependencyCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dependencyCount = dependencyCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo & setPDependencies( const VULKAN_HPP_NAMESPACE::SubpassDependency * pDependencies_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pDependencies = pDependencies_;
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
RenderPassCreateInfo &
|
|
setDependencies( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassDependency> const & dependencies_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dependencyCount = static_cast<uint32_t>( dependencies_.size() );
|
|
pDependencies = dependencies_.data();
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkRenderPassCreateInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkRenderPassCreateInfo *>( this );
|
|
}
|
|
|
|
operator VkRenderPassCreateInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkRenderPassCreateInfo *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
const void * const &,
|
|
VULKAN_HPP_NAMESPACE::RenderPassCreateFlags const &,
|
|
uint32_t const &,
|
|
const VULKAN_HPP_NAMESPACE::AttachmentDescription * const &,
|
|
uint32_t const &,
|
|
const VULKAN_HPP_NAMESPACE::SubpassDescription * const &,
|
|
uint32_t const &,
|
|
const VULKAN_HPP_NAMESPACE::SubpassDependency * const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags, attachmentCount, pAttachments, subpassCount, pSubpasses, dependencyCount, pDependencies );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( RenderPassCreateInfo const & ) const = default;
|
|
#else
|
|
bool operator==( RenderPassCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( attachmentCount == rhs.attachmentCount ) &&
|
|
( pAttachments == rhs.pAttachments ) && ( subpassCount == rhs.subpassCount ) && ( pSubpasses == rhs.pSubpasses ) &&
|
|
( dependencyCount == rhs.dependencyCount ) && ( pDependencies == rhs.pDependencies );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( RenderPassCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassCreateInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags = {};
|
|
uint32_t attachmentCount = {};
|
|
const VULKAN_HPP_NAMESPACE::AttachmentDescription * pAttachments = {};
|
|
uint32_t subpassCount = {};
|
|
const VULKAN_HPP_NAMESPACE::SubpassDescription * pSubpasses = {};
|
|
uint32_t dependencyCount = {};
|
|
const VULKAN_HPP_NAMESPACE::SubpassDependency * pDependencies = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eRenderPassCreateInfo>
|
|
{
|
|
using Type = RenderPassCreateInfo;
|
|
};
|
|
|
|
struct SubpassDescription2
|
|
{
|
|
using NativeType = VkSubpassDescription2;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassDescription2;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR SubpassDescription2( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ = {},
|
|
VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics,
|
|
uint32_t viewMask_ = {},
|
|
uint32_t inputAttachmentCount_ = {},
|
|
const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pInputAttachments_ = {},
|
|
uint32_t colorAttachmentCount_ = {},
|
|
const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pColorAttachments_ = {},
|
|
const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pResolveAttachments_ = {},
|
|
const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilAttachment_ = {},
|
|
uint32_t preserveAttachmentCount_ = {},
|
|
const uint32_t * pPreserveAttachments_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, flags( flags_ )
|
|
, pipelineBindPoint( pipelineBindPoint_ )
|
|
, viewMask( viewMask_ )
|
|
, inputAttachmentCount( inputAttachmentCount_ )
|
|
, pInputAttachments( pInputAttachments_ )
|
|
, colorAttachmentCount( colorAttachmentCount_ )
|
|
, pColorAttachments( pColorAttachments_ )
|
|
, pResolveAttachments( pResolveAttachments_ )
|
|
, pDepthStencilAttachment( pDepthStencilAttachment_ )
|
|
, preserveAttachmentCount( preserveAttachmentCount_ )
|
|
, pPreserveAttachments( pPreserveAttachments_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR SubpassDescription2( SubpassDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SubpassDescription2( VkSubpassDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT : SubpassDescription2( *reinterpret_cast<SubpassDescription2 const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
SubpassDescription2( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_,
|
|
VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_,
|
|
uint32_t viewMask_,
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference2> const & inputAttachments_,
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference2> const & colorAttachments_ = {},
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference2> const & resolveAttachments_ = {},
|
|
const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilAttachment_ = {},
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & preserveAttachments_ = {},
|
|
const void * pNext_ = nullptr )
|
|
: pNext( pNext_ )
|
|
, flags( flags_ )
|
|
, pipelineBindPoint( pipelineBindPoint_ )
|
|
, viewMask( viewMask_ )
|
|
, inputAttachmentCount( static_cast<uint32_t>( inputAttachments_.size() ) )
|
|
, pInputAttachments( inputAttachments_.data() )
|
|
, colorAttachmentCount( static_cast<uint32_t>( colorAttachments_.size() ) )
|
|
, pColorAttachments( colorAttachments_.data() )
|
|
, pResolveAttachments( resolveAttachments_.data() )
|
|
, pDepthStencilAttachment( pDepthStencilAttachment_ )
|
|
, preserveAttachmentCount( static_cast<uint32_t>( preserveAttachments_.size() ) )
|
|
, pPreserveAttachments( preserveAttachments_.data() )
|
|
{
|
|
# ifdef VULKAN_HPP_NO_EXCEPTIONS
|
|
VULKAN_HPP_ASSERT( resolveAttachments_.empty() || ( colorAttachments_.size() == resolveAttachments_.size() ) );
|
|
# else
|
|
if ( !resolveAttachments_.empty() && ( colorAttachments_.size() != resolveAttachments_.size() ) )
|
|
{
|
|
throw LogicError(
|
|
VULKAN_HPP_NAMESPACE_STRING
|
|
"::SubpassDescription2::SubpassDescription2: !resolveAttachments_.empty() && ( colorAttachments_.size() != resolveAttachments_.size() )" );
|
|
}
|
|
# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
SubpassDescription2 & operator=( SubpassDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
SubpassDescription2 & operator=( VkSubpassDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassDescription2 const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setFlags( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pipelineBindPoint = pipelineBindPoint_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setViewMask( uint32_t viewMask_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
viewMask = viewMask_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setInputAttachmentCount( uint32_t inputAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
inputAttachmentCount = inputAttachmentCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 &
|
|
setPInputAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pInputAttachments_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pInputAttachments = pInputAttachments_;
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
SubpassDescription2 & setInputAttachments(
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference2> const & inputAttachments_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
inputAttachmentCount = static_cast<uint32_t>( inputAttachments_.size() );
|
|
pInputAttachments = inputAttachments_.data();
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
colorAttachmentCount = colorAttachmentCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 &
|
|
setPColorAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pColorAttachments_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pColorAttachments = pColorAttachments_;
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
SubpassDescription2 & setColorAttachments(
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference2> const & colorAttachments_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
colorAttachmentCount = static_cast<uint32_t>( colorAttachments_.size() );
|
|
pColorAttachments = colorAttachments_.data();
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 &
|
|
setPResolveAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pResolveAttachments_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pResolveAttachments = pResolveAttachments_;
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
SubpassDescription2 & setResolveAttachments(
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference2> const & resolveAttachments_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
colorAttachmentCount = static_cast<uint32_t>( resolveAttachments_.size() );
|
|
pResolveAttachments = resolveAttachments_.data();
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 &
|
|
setPDepthStencilAttachment( const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilAttachment_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pDepthStencilAttachment = pDepthStencilAttachment_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setPreserveAttachmentCount( uint32_t preserveAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
preserveAttachmentCount = preserveAttachmentCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setPPreserveAttachments( const uint32_t * pPreserveAttachments_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pPreserveAttachments = pPreserveAttachments_;
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
SubpassDescription2 &
|
|
setPreserveAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & preserveAttachments_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
preserveAttachmentCount = static_cast<uint32_t>( preserveAttachments_.size() );
|
|
pPreserveAttachments = preserveAttachments_.data();
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkSubpassDescription2 const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkSubpassDescription2 *>( this );
|
|
}
|
|
|
|
operator VkSubpassDescription2 &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkSubpassDescription2 *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
const void * const &,
|
|
VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags const &,
|
|
VULKAN_HPP_NAMESPACE::PipelineBindPoint const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
const VULKAN_HPP_NAMESPACE::AttachmentReference2 * const &,
|
|
uint32_t const &,
|
|
const VULKAN_HPP_NAMESPACE::AttachmentReference2 * const &,
|
|
const VULKAN_HPP_NAMESPACE::AttachmentReference2 * const &,
|
|
const VULKAN_HPP_NAMESPACE::AttachmentReference2 * const &,
|
|
uint32_t const &,
|
|
const uint32_t * const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType,
|
|
pNext,
|
|
flags,
|
|
pipelineBindPoint,
|
|
viewMask,
|
|
inputAttachmentCount,
|
|
pInputAttachments,
|
|
colorAttachmentCount,
|
|
pColorAttachments,
|
|
pResolveAttachments,
|
|
pDepthStencilAttachment,
|
|
preserveAttachmentCount,
|
|
pPreserveAttachments );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( SubpassDescription2 const & ) const = default;
|
|
#else
|
|
bool operator==( SubpassDescription2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( pipelineBindPoint == rhs.pipelineBindPoint ) &&
|
|
( viewMask == rhs.viewMask ) && ( inputAttachmentCount == rhs.inputAttachmentCount ) && ( pInputAttachments == rhs.pInputAttachments ) &&
|
|
( colorAttachmentCount == rhs.colorAttachmentCount ) && ( pColorAttachments == rhs.pColorAttachments ) &&
|
|
( pResolveAttachments == rhs.pResolveAttachments ) && ( pDepthStencilAttachment == rhs.pDepthStencilAttachment ) &&
|
|
( preserveAttachmentCount == rhs.preserveAttachmentCount ) && ( pPreserveAttachments == rhs.pPreserveAttachments );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( SubpassDescription2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassDescription2;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags = {};
|
|
VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics;
|
|
uint32_t viewMask = {};
|
|
uint32_t inputAttachmentCount = {};
|
|
const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pInputAttachments = {};
|
|
uint32_t colorAttachmentCount = {};
|
|
const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pColorAttachments = {};
|
|
const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pResolveAttachments = {};
|
|
const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilAttachment = {};
|
|
uint32_t preserveAttachmentCount = {};
|
|
const uint32_t * pPreserveAttachments = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eSubpassDescription2>
|
|
{
|
|
using Type = SubpassDescription2;
|
|
};
|
|
|
|
using SubpassDescription2KHR = SubpassDescription2;
|
|
|
|
struct SubpassDependency2
|
|
{
|
|
using NativeType = VkSubpassDependency2;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassDependency2;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR SubpassDependency2( uint32_t srcSubpass_ = {},
|
|
uint32_t dstSubpass_ = {},
|
|
VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_ = {},
|
|
VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_ = {},
|
|
VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {},
|
|
VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {},
|
|
VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ = {},
|
|
int32_t viewOffset_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, srcSubpass( srcSubpass_ )
|
|
, dstSubpass( dstSubpass_ )
|
|
, srcStageMask( srcStageMask_ )
|
|
, dstStageMask( dstStageMask_ )
|
|
, srcAccessMask( srcAccessMask_ )
|
|
, dstAccessMask( dstAccessMask_ )
|
|
, dependencyFlags( dependencyFlags_ )
|
|
, viewOffset( viewOffset_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR SubpassDependency2( SubpassDependency2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SubpassDependency2( VkSubpassDependency2 const & rhs ) VULKAN_HPP_NOEXCEPT : SubpassDependency2( *reinterpret_cast<SubpassDependency2 const *>( &rhs ) ) {}
|
|
|
|
SubpassDependency2 & operator=( SubpassDependency2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
SubpassDependency2 & operator=( VkSubpassDependency2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassDependency2 const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 & setSrcSubpass( uint32_t srcSubpass_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
srcSubpass = srcSubpass_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 & setDstSubpass( uint32_t dstSubpass_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dstSubpass = dstSubpass_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 & setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
srcStageMask = srcStageMask_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 & setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dstStageMask = dstStageMask_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
srcAccessMask = srcAccessMask_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dstAccessMask = dstAccessMask_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 & setDependencyFlags( VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dependencyFlags = dependencyFlags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 & setViewOffset( int32_t viewOffset_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
viewOffset = viewOffset_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkSubpassDependency2 const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkSubpassDependency2 *>( this );
|
|
}
|
|
|
|
operator VkSubpassDependency2 &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkSubpassDependency2 *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
const void * const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
VULKAN_HPP_NAMESPACE::PipelineStageFlags const &,
|
|
VULKAN_HPP_NAMESPACE::PipelineStageFlags const &,
|
|
VULKAN_HPP_NAMESPACE::AccessFlags const &,
|
|
VULKAN_HPP_NAMESPACE::AccessFlags const &,
|
|
VULKAN_HPP_NAMESPACE::DependencyFlags const &,
|
|
int32_t const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, srcSubpass, dstSubpass, srcStageMask, dstStageMask, srcAccessMask, dstAccessMask, dependencyFlags, viewOffset );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( SubpassDependency2 const & ) const = default;
|
|
#else
|
|
bool operator==( SubpassDependency2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcSubpass == rhs.srcSubpass ) && ( dstSubpass == rhs.dstSubpass ) &&
|
|
( srcStageMask == rhs.srcStageMask ) && ( dstStageMask == rhs.dstStageMask ) && ( srcAccessMask == rhs.srcAccessMask ) &&
|
|
( dstAccessMask == rhs.dstAccessMask ) && ( dependencyFlags == rhs.dependencyFlags ) && ( viewOffset == rhs.viewOffset );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( SubpassDependency2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassDependency2;
|
|
const void * pNext = {};
|
|
uint32_t srcSubpass = {};
|
|
uint32_t dstSubpass = {};
|
|
VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask = {};
|
|
VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask = {};
|
|
VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {};
|
|
VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {};
|
|
VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags = {};
|
|
int32_t viewOffset = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eSubpassDependency2>
|
|
{
|
|
using Type = SubpassDependency2;
|
|
};
|
|
|
|
using SubpassDependency2KHR = SubpassDependency2;
|
|
|
|
struct RenderPassCreateInfo2
|
|
{
|
|
using NativeType = VkRenderPassCreateInfo2;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassCreateInfo2;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR RenderPassCreateInfo2( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ = {},
|
|
uint32_t attachmentCount_ = {},
|
|
const VULKAN_HPP_NAMESPACE::AttachmentDescription2 * pAttachments_ = {},
|
|
uint32_t subpassCount_ = {},
|
|
const VULKAN_HPP_NAMESPACE::SubpassDescription2 * pSubpasses_ = {},
|
|
uint32_t dependencyCount_ = {},
|
|
const VULKAN_HPP_NAMESPACE::SubpassDependency2 * pDependencies_ = {},
|
|
uint32_t correlatedViewMaskCount_ = {},
|
|
const uint32_t * pCorrelatedViewMasks_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, flags( flags_ )
|
|
, attachmentCount( attachmentCount_ )
|
|
, pAttachments( pAttachments_ )
|
|
, subpassCount( subpassCount_ )
|
|
, pSubpasses( pSubpasses_ )
|
|
, dependencyCount( dependencyCount_ )
|
|
, pDependencies( pDependencies_ )
|
|
, correlatedViewMaskCount( correlatedViewMaskCount_ )
|
|
, pCorrelatedViewMasks( pCorrelatedViewMasks_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR RenderPassCreateInfo2( RenderPassCreateInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
RenderPassCreateInfo2( VkRenderPassCreateInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: RenderPassCreateInfo2( *reinterpret_cast<RenderPassCreateInfo2 const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
RenderPassCreateInfo2( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_,
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentDescription2> const & attachments_,
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassDescription2> const & subpasses_ = {},
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassDependency2> const & dependencies_ = {},
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & correlatedViewMasks_ = {},
|
|
const void * pNext_ = nullptr )
|
|
: pNext( pNext_ )
|
|
, flags( flags_ )
|
|
, attachmentCount( static_cast<uint32_t>( attachments_.size() ) )
|
|
, pAttachments( attachments_.data() )
|
|
, subpassCount( static_cast<uint32_t>( subpasses_.size() ) )
|
|
, pSubpasses( subpasses_.data() )
|
|
, dependencyCount( static_cast<uint32_t>( dependencies_.size() ) )
|
|
, pDependencies( dependencies_.data() )
|
|
, correlatedViewMaskCount( static_cast<uint32_t>( correlatedViewMasks_.size() ) )
|
|
, pCorrelatedViewMasks( correlatedViewMasks_.data() )
|
|
{
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
RenderPassCreateInfo2 & operator=( RenderPassCreateInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
RenderPassCreateInfo2 & operator=( VkRenderPassCreateInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setFlags( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
attachmentCount = attachmentCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setPAttachments( const VULKAN_HPP_NAMESPACE::AttachmentDescription2 * pAttachments_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pAttachments = pAttachments_;
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
RenderPassCreateInfo2 & setAttachments(
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentDescription2> const & attachments_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
attachmentCount = static_cast<uint32_t>( attachments_.size() );
|
|
pAttachments = attachments_.data();
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setSubpassCount( uint32_t subpassCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
subpassCount = subpassCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setPSubpasses( const VULKAN_HPP_NAMESPACE::SubpassDescription2 * pSubpasses_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pSubpasses = pSubpasses_;
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
RenderPassCreateInfo2 &
|
|
setSubpasses( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassDescription2> const & subpasses_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
subpassCount = static_cast<uint32_t>( subpasses_.size() );
|
|
pSubpasses = subpasses_.data();
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setDependencyCount( uint32_t dependencyCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dependencyCount = dependencyCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setPDependencies( const VULKAN_HPP_NAMESPACE::SubpassDependency2 * pDependencies_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pDependencies = pDependencies_;
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
RenderPassCreateInfo2 &
|
|
setDependencies( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassDependency2> const & dependencies_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dependencyCount = static_cast<uint32_t>( dependencies_.size() );
|
|
pDependencies = dependencies_.data();
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setCorrelatedViewMaskCount( uint32_t correlatedViewMaskCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
correlatedViewMaskCount = correlatedViewMaskCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setPCorrelatedViewMasks( const uint32_t * pCorrelatedViewMasks_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pCorrelatedViewMasks = pCorrelatedViewMasks_;
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
RenderPassCreateInfo2 &
|
|
setCorrelatedViewMasks( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & correlatedViewMasks_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
correlatedViewMaskCount = static_cast<uint32_t>( correlatedViewMasks_.size() );
|
|
pCorrelatedViewMasks = correlatedViewMasks_.data();
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkRenderPassCreateInfo2 const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkRenderPassCreateInfo2 *>( this );
|
|
}
|
|
|
|
operator VkRenderPassCreateInfo2 &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkRenderPassCreateInfo2 *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
const void * const &,
|
|
VULKAN_HPP_NAMESPACE::RenderPassCreateFlags const &,
|
|
uint32_t const &,
|
|
const VULKAN_HPP_NAMESPACE::AttachmentDescription2 * const &,
|
|
uint32_t const &,
|
|
const VULKAN_HPP_NAMESPACE::SubpassDescription2 * const &,
|
|
uint32_t const &,
|
|
const VULKAN_HPP_NAMESPACE::SubpassDependency2 * const &,
|
|
uint32_t const &,
|
|
const uint32_t * const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType,
|
|
pNext,
|
|
flags,
|
|
attachmentCount,
|
|
pAttachments,
|
|
subpassCount,
|
|
pSubpasses,
|
|
dependencyCount,
|
|
pDependencies,
|
|
correlatedViewMaskCount,
|
|
pCorrelatedViewMasks );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( RenderPassCreateInfo2 const & ) const = default;
|
|
#else
|
|
bool operator==( RenderPassCreateInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( attachmentCount == rhs.attachmentCount ) &&
|
|
( pAttachments == rhs.pAttachments ) && ( subpassCount == rhs.subpassCount ) && ( pSubpasses == rhs.pSubpasses ) &&
|
|
( dependencyCount == rhs.dependencyCount ) && ( pDependencies == rhs.pDependencies ) &&
|
|
( correlatedViewMaskCount == rhs.correlatedViewMaskCount ) && ( pCorrelatedViewMasks == rhs.pCorrelatedViewMasks );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( RenderPassCreateInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassCreateInfo2;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags = {};
|
|
uint32_t attachmentCount = {};
|
|
const VULKAN_HPP_NAMESPACE::AttachmentDescription2 * pAttachments = {};
|
|
uint32_t subpassCount = {};
|
|
const VULKAN_HPP_NAMESPACE::SubpassDescription2 * pSubpasses = {};
|
|
uint32_t dependencyCount = {};
|
|
const VULKAN_HPP_NAMESPACE::SubpassDependency2 * pDependencies = {};
|
|
uint32_t correlatedViewMaskCount = {};
|
|
const uint32_t * pCorrelatedViewMasks = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eRenderPassCreateInfo2>
|
|
{
|
|
using Type = RenderPassCreateInfo2;
|
|
};
|
|
|
|
using RenderPassCreateInfo2KHR = RenderPassCreateInfo2;
|
|
|
|
struct RenderPassInputAttachmentAspectCreateInfo
|
|
{
|
|
using NativeType = VkRenderPassInputAttachmentAspectCreateInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassInputAttachmentAspectCreateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR RenderPassInputAttachmentAspectCreateInfo( uint32_t aspectReferenceCount_ = {},
|
|
const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference * pAspectReferences_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, aspectReferenceCount( aspectReferenceCount_ )
|
|
, pAspectReferences( pAspectReferences_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR RenderPassInputAttachmentAspectCreateInfo( RenderPassInputAttachmentAspectCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
RenderPassInputAttachmentAspectCreateInfo( VkRenderPassInputAttachmentAspectCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: RenderPassInputAttachmentAspectCreateInfo( *reinterpret_cast<RenderPassInputAttachmentAspectCreateInfo const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
RenderPassInputAttachmentAspectCreateInfo(
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference> const & aspectReferences_,
|
|
const void * pNext_ = nullptr )
|
|
: pNext( pNext_ ), aspectReferenceCount( static_cast<uint32_t>( aspectReferences_.size() ) ), pAspectReferences( aspectReferences_.data() )
|
|
{
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
RenderPassInputAttachmentAspectCreateInfo & operator=( RenderPassInputAttachmentAspectCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
RenderPassInputAttachmentAspectCreateInfo & operator=( VkRenderPassInputAttachmentAspectCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassInputAttachmentAspectCreateInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 RenderPassInputAttachmentAspectCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RenderPassInputAttachmentAspectCreateInfo & setAspectReferenceCount( uint32_t aspectReferenceCount_ ) VULKAN_HPP_NOEXCEPT
{
aspectReferenceCount = aspectReferenceCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RenderPassInputAttachmentAspectCreateInfo &
setPAspectReferences( const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference * pAspectReferences_ ) VULKAN_HPP_NOEXCEPT
{
pAspectReferences = pAspectReferences_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
RenderPassInputAttachmentAspectCreateInfo & setAspectReferences(
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference> const & aspectReferences_ ) VULKAN_HPP_NOEXCEPT
{
aspectReferenceCount = static_cast<uint32_t>( aspectReferences_.size() );
pAspectReferences = aspectReferences_.data();
return *this;
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
operator VkRenderPassInputAttachmentAspectCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkRenderPassInputAttachmentAspectCreateInfo *>( this );
}
operator VkRenderPassInputAttachmentAspectCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkRenderPassInputAttachmentAspectCreateInfo *>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
const void * const &,
uint32_t const &,
const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference * const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, aspectReferenceCount, pAspectReferences );
}
#endif
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
auto operator<=>( RenderPassInputAttachmentAspectCreateInfo const & ) const = default;
#else
bool operator==( RenderPassInputAttachmentAspectCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( aspectReferenceCount == rhs.aspectReferenceCount ) &&
( pAspectReferences == rhs.pAspectReferences );
# endif
}
bool operator!=( RenderPassInputAttachmentAspectCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassInputAttachmentAspectCreateInfo;
const void * pNext = {};
uint32_t aspectReferenceCount = {};
const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference * pAspectReferences = {};
};
template <>
struct CppType<StructureType, StructureType::eRenderPassInputAttachmentAspectCreateInfo>
{
using Type = RenderPassInputAttachmentAspectCreateInfo;
};
using RenderPassInputAttachmentAspectCreateInfoKHR = RenderPassInputAttachmentAspectCreateInfo;
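  // Illustrative usage sketch, not part of the generated header: this struct extends
  // vk::RenderPassCreateInfo through its pNext chain; a hypothetical single-reference setup
  // could look roughly like this:
  //
  //   vk::InputAttachmentAspectReference aspectRef( 0 /*subpass*/, 0 /*inputAttachmentIndex*/,
  //                                                 vk::ImageAspectFlagBits::eColor );
  //   vk::RenderPassInputAttachmentAspectCreateInfo aspectInfo{};
  //   aspectInfo.setAspectReferences( aspectRef );
  //   renderPassCreateInfo.setPNext( &aspectInfo );  // renderPassCreateInfo is a vk::RenderPassCreateInfo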
struct RenderPassMultiviewCreateInfo
|
|
{
|
|
using NativeType = VkRenderPassMultiviewCreateInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassMultiviewCreateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR RenderPassMultiviewCreateInfo( uint32_t subpassCount_ = {},
|
|
const uint32_t * pViewMasks_ = {},
|
|
uint32_t dependencyCount_ = {},
|
|
const int32_t * pViewOffsets_ = {},
|
|
uint32_t correlationMaskCount_ = {},
|
|
const uint32_t * pCorrelationMasks_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, subpassCount( subpassCount_ )
|
|
, pViewMasks( pViewMasks_ )
|
|
, dependencyCount( dependencyCount_ )
|
|
, pViewOffsets( pViewOffsets_ )
|
|
, correlationMaskCount( correlationMaskCount_ )
|
|
, pCorrelationMasks( pCorrelationMasks_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR RenderPassMultiviewCreateInfo( RenderPassMultiviewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
RenderPassMultiviewCreateInfo( VkRenderPassMultiviewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: RenderPassMultiviewCreateInfo( *reinterpret_cast<RenderPassMultiviewCreateInfo const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
RenderPassMultiviewCreateInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & viewMasks_,
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const int32_t> const & viewOffsets_ = {},
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & correlationMasks_ = {},
|
|
const void * pNext_ = nullptr )
|
|
: pNext( pNext_ )
|
|
, subpassCount( static_cast<uint32_t>( viewMasks_.size() ) )
|
|
, pViewMasks( viewMasks_.data() )
|
|
, dependencyCount( static_cast<uint32_t>( viewOffsets_.size() ) )
|
|
, pViewOffsets( viewOffsets_.data() )
|
|
, correlationMaskCount( static_cast<uint32_t>( correlationMasks_.size() ) )
|
|
, pCorrelationMasks( correlationMasks_.data() )
|
|
{
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
RenderPassMultiviewCreateInfo & operator=( RenderPassMultiviewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
RenderPassMultiviewCreateInfo & operator=( VkRenderPassMultiviewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassMultiviewCreateInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 RenderPassMultiviewCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderPassMultiviewCreateInfo & setSubpassCount( uint32_t subpassCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
subpassCount = subpassCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderPassMultiviewCreateInfo & setPViewMasks( const uint32_t * pViewMasks_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pViewMasks = pViewMasks_;
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
RenderPassMultiviewCreateInfo & setViewMasks( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & viewMasks_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
subpassCount = static_cast<uint32_t>( viewMasks_.size() );
|
|
pViewMasks = viewMasks_.data();
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderPassMultiviewCreateInfo & setDependencyCount( uint32_t dependencyCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dependencyCount = dependencyCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderPassMultiviewCreateInfo & setPViewOffsets( const int32_t * pViewOffsets_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pViewOffsets = pViewOffsets_;
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
RenderPassMultiviewCreateInfo & setViewOffsets( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const int32_t> const & viewOffsets_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dependencyCount = static_cast<uint32_t>( viewOffsets_.size() );
|
|
pViewOffsets = viewOffsets_.data();
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderPassMultiviewCreateInfo & setCorrelationMaskCount( uint32_t correlationMaskCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
correlationMaskCount = correlationMaskCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderPassMultiviewCreateInfo & setPCorrelationMasks( const uint32_t * pCorrelationMasks_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pCorrelationMasks = pCorrelationMasks_;
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
RenderPassMultiviewCreateInfo &
|
|
setCorrelationMasks( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & correlationMasks_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
correlationMaskCount = static_cast<uint32_t>( correlationMasks_.size() );
|
|
pCorrelationMasks = correlationMasks_.data();
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkRenderPassMultiviewCreateInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkRenderPassMultiviewCreateInfo *>( this );
|
|
}
|
|
|
|
operator VkRenderPassMultiviewCreateInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkRenderPassMultiviewCreateInfo *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
const void * const &,
|
|
uint32_t const &,
|
|
const uint32_t * const &,
|
|
uint32_t const &,
|
|
const int32_t * const &,
|
|
uint32_t const &,
|
|
const uint32_t * const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, subpassCount, pViewMasks, dependencyCount, pViewOffsets, correlationMaskCount, pCorrelationMasks );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( RenderPassMultiviewCreateInfo const & ) const = default;
|
|
#else
|
|
bool operator==( RenderPassMultiviewCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( subpassCount == rhs.subpassCount ) && ( pViewMasks == rhs.pViewMasks ) &&
|
|
( dependencyCount == rhs.dependencyCount ) && ( pViewOffsets == rhs.pViewOffsets ) && ( correlationMaskCount == rhs.correlationMaskCount ) &&
|
|
( pCorrelationMasks == rhs.pCorrelationMasks );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( RenderPassMultiviewCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassMultiviewCreateInfo;
|
|
const void * pNext = {};
|
|
uint32_t subpassCount = {};
|
|
const uint32_t * pViewMasks = {};
|
|
uint32_t dependencyCount = {};
|
|
const int32_t * pViewOffsets = {};
|
|
uint32_t correlationMaskCount = {};
|
|
const uint32_t * pCorrelationMasks = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eRenderPassMultiviewCreateInfo>
|
|
{
|
|
using Type = RenderPassMultiviewCreateInfo;
|
|
};
|
|
|
|
using RenderPassMultiviewCreateInfoKHR = RenderPassMultiviewCreateInfo;
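  // Illustrative usage sketch, not part of the generated header: chained into
  // vk::RenderPassCreateInfo::pNext to render subpass 0 to two views of a layered framebuffer
  // (assumes the multiview feature is enabled):
  //
  //   const uint32_t viewMask        = 0b11;  // views 0 and 1
  //   const uint32_t correlationMask = 0b11;
  //   vk::RenderPassMultiviewCreateInfo multiviewInfo{};
  //   multiviewInfo.setViewMasks( viewMask ).setCorrelationMasks( correlationMask );
  //   renderPassCreateInfo.setPNext( &multiviewInfo );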
struct SubpassSampleLocationsEXT
|
|
{
|
|
using NativeType = VkSubpassSampleLocationsEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR SubpassSampleLocationsEXT( uint32_t subpassIndex_ = {},
|
|
VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo_ = {} ) VULKAN_HPP_NOEXCEPT
|
|
: subpassIndex( subpassIndex_ )
|
|
, sampleLocationsInfo( sampleLocationsInfo_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR SubpassSampleLocationsEXT( SubpassSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SubpassSampleLocationsEXT( VkSubpassSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: SubpassSampleLocationsEXT( *reinterpret_cast<SubpassSampleLocationsEXT const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
SubpassSampleLocationsEXT & operator=( SubpassSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
SubpassSampleLocationsEXT & operator=( VkSubpassSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 SubpassSampleLocationsEXT & setSubpassIndex( uint32_t subpassIndex_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
subpassIndex = subpassIndex_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubpassSampleLocationsEXT &
|
|
setSampleLocationsInfo( VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const & sampleLocationsInfo_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
sampleLocationsInfo = sampleLocationsInfo_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkSubpassSampleLocationsEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkSubpassSampleLocationsEXT *>( this );
|
|
}
|
|
|
|
operator VkSubpassSampleLocationsEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkSubpassSampleLocationsEXT *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<uint32_t const &, VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( subpassIndex, sampleLocationsInfo );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( SubpassSampleLocationsEXT const & ) const = default;
|
|
#else
|
|
bool operator==( SubpassSampleLocationsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( subpassIndex == rhs.subpassIndex ) && ( sampleLocationsInfo == rhs.sampleLocationsInfo );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( SubpassSampleLocationsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
uint32_t subpassIndex = {};
|
|
VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo = {};
|
|
};
|
|
|
|
struct RenderPassSampleLocationsBeginInfoEXT
|
|
{
|
|
using NativeType = VkRenderPassSampleLocationsBeginInfoEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassSampleLocationsBeginInfoEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR
|
|
RenderPassSampleLocationsBeginInfoEXT( uint32_t attachmentInitialSampleLocationsCount_ = {},
|
|
const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT * pAttachmentInitialSampleLocations_ = {},
|
|
uint32_t postSubpassSampleLocationsCount_ = {},
|
|
const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT * pPostSubpassSampleLocations_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, attachmentInitialSampleLocationsCount( attachmentInitialSampleLocationsCount_ )
|
|
, pAttachmentInitialSampleLocations( pAttachmentInitialSampleLocations_ )
|
|
, postSubpassSampleLocationsCount( postSubpassSampleLocationsCount_ )
|
|
, pPostSubpassSampleLocations( pPostSubpassSampleLocations_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR RenderPassSampleLocationsBeginInfoEXT( RenderPassSampleLocationsBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
RenderPassSampleLocationsBeginInfoEXT( VkRenderPassSampleLocationsBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: RenderPassSampleLocationsBeginInfoEXT( *reinterpret_cast<RenderPassSampleLocationsBeginInfoEXT const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
RenderPassSampleLocationsBeginInfoEXT(
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT> const & attachmentInitialSampleLocations_,
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT> const & postSubpassSampleLocations_ = {},
|
|
const void * pNext_ = nullptr )
|
|
: pNext( pNext_ )
|
|
, attachmentInitialSampleLocationsCount( static_cast<uint32_t>( attachmentInitialSampleLocations_.size() ) )
|
|
, pAttachmentInitialSampleLocations( attachmentInitialSampleLocations_.data() )
|
|
, postSubpassSampleLocationsCount( static_cast<uint32_t>( postSubpassSampleLocations_.size() ) )
|
|
, pPostSubpassSampleLocations( postSubpassSampleLocations_.data() )
|
|
{
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
RenderPassSampleLocationsBeginInfoEXT & operator=( RenderPassSampleLocationsBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
RenderPassSampleLocationsBeginInfoEXT & operator=( VkRenderPassSampleLocationsBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassSampleLocationsBeginInfoEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 RenderPassSampleLocationsBeginInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderPassSampleLocationsBeginInfoEXT &
|
|
setAttachmentInitialSampleLocationsCount( uint32_t attachmentInitialSampleLocationsCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
attachmentInitialSampleLocationsCount = attachmentInitialSampleLocationsCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderPassSampleLocationsBeginInfoEXT &
|
|
setPAttachmentInitialSampleLocations( const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT * pAttachmentInitialSampleLocations_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pAttachmentInitialSampleLocations = pAttachmentInitialSampleLocations_;
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
RenderPassSampleLocationsBeginInfoEXT & setAttachmentInitialSampleLocations(
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT> const & attachmentInitialSampleLocations_ )
|
|
VULKAN_HPP_NOEXCEPT
|
|
{
|
|
attachmentInitialSampleLocationsCount = static_cast<uint32_t>( attachmentInitialSampleLocations_.size() );
|
|
pAttachmentInitialSampleLocations = attachmentInitialSampleLocations_.data();
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderPassSampleLocationsBeginInfoEXT &
|
|
setPostSubpassSampleLocationsCount( uint32_t postSubpassSampleLocationsCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
postSubpassSampleLocationsCount = postSubpassSampleLocationsCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderPassSampleLocationsBeginInfoEXT &
|
|
setPPostSubpassSampleLocations( const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT * pPostSubpassSampleLocations_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pPostSubpassSampleLocations = pPostSubpassSampleLocations_;
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
RenderPassSampleLocationsBeginInfoEXT & setPostSubpassSampleLocations(
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT> const & postSubpassSampleLocations_ )
|
|
VULKAN_HPP_NOEXCEPT
|
|
{
|
|
postSubpassSampleLocationsCount = static_cast<uint32_t>( postSubpassSampleLocations_.size() );
|
|
pPostSubpassSampleLocations = postSubpassSampleLocations_.data();
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkRenderPassSampleLocationsBeginInfoEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkRenderPassSampleLocationsBeginInfoEXT *>( this );
|
|
}
|
|
|
|
operator VkRenderPassSampleLocationsBeginInfoEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkRenderPassSampleLocationsBeginInfoEXT *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
const void * const &,
|
|
uint32_t const &,
|
|
const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT * const &,
|
|
uint32_t const &,
|
|
const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT * const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie(
|
|
sType, pNext, attachmentInitialSampleLocationsCount, pAttachmentInitialSampleLocations, postSubpassSampleLocationsCount, pPostSubpassSampleLocations );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( RenderPassSampleLocationsBeginInfoEXT const & ) const = default;
|
|
#else
|
|
bool operator==( RenderPassSampleLocationsBeginInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( attachmentInitialSampleLocationsCount == rhs.attachmentInitialSampleLocationsCount ) &&
|
|
( pAttachmentInitialSampleLocations == rhs.pAttachmentInitialSampleLocations ) &&
|
|
( postSubpassSampleLocationsCount == rhs.postSubpassSampleLocationsCount ) && ( pPostSubpassSampleLocations == rhs.pPostSubpassSampleLocations );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( RenderPassSampleLocationsBeginInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassSampleLocationsBeginInfoEXT;
|
|
const void * pNext = {};
|
|
uint32_t attachmentInitialSampleLocationsCount = {};
|
|
const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT * pAttachmentInitialSampleLocations = {};
|
|
uint32_t postSubpassSampleLocationsCount = {};
|
|
const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT * pPostSubpassSampleLocations = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eRenderPassSampleLocationsBeginInfoEXT>
|
|
{
|
|
using Type = RenderPassSampleLocationsBeginInfoEXT;
|
|
};
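  // Illustrative usage sketch, not part of the generated header: with VK_EXT_sample_locations
  // enabled, this struct is chained into vk::RenderPassBeginInfo::pNext; `sampleLocationsInfo`
  // is assumed to be a previously filled vk::SampleLocationsInfoEXT:
  //
  //   vk::AttachmentSampleLocationsEXT initialLocations( 0 /*attachmentIndex*/, sampleLocationsInfo );
  //   vk::SubpassSampleLocationsEXT    subpassLocations( 0 /*subpassIndex*/, sampleLocationsInfo );
  //   vk::RenderPassSampleLocationsBeginInfoEXT sampleLocationsBeginInfo{};
  //   sampleLocationsBeginInfo.setAttachmentInitialSampleLocations( initialLocations )
  //     .setPostSubpassSampleLocations( subpassLocations );
  //   renderPassBeginInfo.setPNext( &sampleLocationsBeginInfo );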
struct RenderingAttachmentInfo
|
|
{
|
|
using NativeType = VkRenderingAttachmentInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderingAttachmentInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo( VULKAN_HPP_NAMESPACE::ImageView imageView_ = {},
|
|
VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
|
|
VULKAN_HPP_NAMESPACE::ResolveModeFlagBits resolveMode_ = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone,
|
|
VULKAN_HPP_NAMESPACE::ImageView resolveImageView_ = {},
|
|
VULKAN_HPP_NAMESPACE::ImageLayout resolveImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
|
|
VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad,
|
|
VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore,
|
|
VULKAN_HPP_NAMESPACE::ClearValue clearValue_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, imageView( imageView_ )
|
|
, imageLayout( imageLayout_ )
|
|
, resolveMode( resolveMode_ )
|
|
, resolveImageView( resolveImageView_ )
|
|
, resolveImageLayout( resolveImageLayout_ )
|
|
, loadOp( loadOp_ )
|
|
, storeOp( storeOp_ )
|
|
, clearValue( clearValue_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo( RenderingAttachmentInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
RenderingAttachmentInfo( VkRenderingAttachmentInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: RenderingAttachmentInfo( *reinterpret_cast<RenderingAttachmentInfo const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
RenderingAttachmentInfo & operator=( RenderingAttachmentInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
RenderingAttachmentInfo & operator=( VkRenderingAttachmentInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & setImageView( VULKAN_HPP_NAMESPACE::ImageView imageView_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
imageView = imageView_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & setImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
imageLayout = imageLayout_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & setResolveMode( VULKAN_HPP_NAMESPACE::ResolveModeFlagBits resolveMode_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
resolveMode = resolveMode_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & setResolveImageView( VULKAN_HPP_NAMESPACE::ImageView resolveImageView_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
resolveImageView = resolveImageView_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & setResolveImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout resolveImageLayout_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
resolveImageLayout = resolveImageLayout_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & setLoadOp( VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
loadOp = loadOp_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & setStoreOp( VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
storeOp = storeOp_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & setClearValue( VULKAN_HPP_NAMESPACE::ClearValue const & clearValue_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
clearValue = clearValue_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkRenderingAttachmentInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkRenderingAttachmentInfo *>( this );
|
|
}
|
|
|
|
operator VkRenderingAttachmentInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkRenderingAttachmentInfo *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
const void * const &,
|
|
VULKAN_HPP_NAMESPACE::ImageView const &,
|
|
VULKAN_HPP_NAMESPACE::ImageLayout const &,
|
|
VULKAN_HPP_NAMESPACE::ResolveModeFlagBits const &,
|
|
VULKAN_HPP_NAMESPACE::ImageView const &,
|
|
VULKAN_HPP_NAMESPACE::ImageLayout const &,
|
|
VULKAN_HPP_NAMESPACE::AttachmentLoadOp const &,
|
|
VULKAN_HPP_NAMESPACE::AttachmentStoreOp const &,
|
|
VULKAN_HPP_NAMESPACE::ClearValue const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, imageView, imageLayout, resolveMode, resolveImageView, resolveImageLayout, loadOp, storeOp, clearValue );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderingAttachmentInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::ImageView imageView = {};
|
|
VULKAN_HPP_NAMESPACE::ImageLayout imageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
|
|
VULKAN_HPP_NAMESPACE::ResolveModeFlagBits resolveMode = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone;
|
|
VULKAN_HPP_NAMESPACE::ImageView resolveImageView = {};
|
|
VULKAN_HPP_NAMESPACE::ImageLayout resolveImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
|
|
VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad;
|
|
VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore;
|
|
VULKAN_HPP_NAMESPACE::ClearValue clearValue = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eRenderingAttachmentInfo>
|
|
{
|
|
using Type = RenderingAttachmentInfo;
|
|
};
|
|
|
|
using RenderingAttachmentInfoKHR = RenderingAttachmentInfo;
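  // Illustrative usage sketch, not part of the generated header: describes one color attachment
  // for dynamic rendering, assuming `colorImageView` is a valid vk::ImageView:
  //
  //   vk::RenderingAttachmentInfo colorAttachment{};
  //   colorAttachment.setImageView( colorImageView )
  //     .setImageLayout( vk::ImageLayout::eColorAttachmentOptimal )
  //     .setLoadOp( vk::AttachmentLoadOp::eClear )
  //     .setStoreOp( vk::AttachmentStoreOp::eStore )
  //     .setClearValue( vk::ClearColorValue( std::array<float, 4>{ { 0.0f, 0.0f, 0.0f, 1.0f } } ) );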
struct RenderingInfo
|
|
{
|
|
using NativeType = VkRenderingInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderingInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR_14 RenderingInfo( VULKAN_HPP_NAMESPACE::RenderingFlags flags_ = {},
|
|
VULKAN_HPP_NAMESPACE::Rect2D renderArea_ = {},
|
|
uint32_t layerCount_ = {},
|
|
uint32_t viewMask_ = {},
|
|
uint32_t colorAttachmentCount_ = {},
|
|
const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pColorAttachments_ = {},
|
|
const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pDepthAttachment_ = {},
|
|
const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pStencilAttachment_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, flags( flags_ )
|
|
, renderArea( renderArea_ )
|
|
, layerCount( layerCount_ )
|
|
, viewMask( viewMask_ )
|
|
, colorAttachmentCount( colorAttachmentCount_ )
|
|
, pColorAttachments( pColorAttachments_ )
|
|
, pDepthAttachment( pDepthAttachment_ )
|
|
, pStencilAttachment( pStencilAttachment_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderingInfo( RenderingInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
RenderingInfo( VkRenderingInfo const & rhs ) VULKAN_HPP_NOEXCEPT : RenderingInfo( *reinterpret_cast<RenderingInfo const *>( &rhs ) ) {}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
RenderingInfo( VULKAN_HPP_NAMESPACE::RenderingFlags flags_,
|
|
VULKAN_HPP_NAMESPACE::Rect2D renderArea_,
|
|
uint32_t layerCount_,
|
|
uint32_t viewMask_,
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo> const & colorAttachments_,
|
|
const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pDepthAttachment_ = {},
|
|
const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pStencilAttachment_ = {},
|
|
const void * pNext_ = nullptr )
|
|
: pNext( pNext_ )
|
|
, flags( flags_ )
|
|
, renderArea( renderArea_ )
|
|
, layerCount( layerCount_ )
|
|
, viewMask( viewMask_ )
|
|
, colorAttachmentCount( static_cast<uint32_t>( colorAttachments_.size() ) )
|
|
, pColorAttachments( colorAttachments_.data() )
|
|
, pDepthAttachment( pDepthAttachment_ )
|
|
, pStencilAttachment( pStencilAttachment_ )
|
|
{
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
RenderingInfo & operator=( RenderingInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
RenderingInfo & operator=( VkRenderingInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderingInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 RenderingInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderingInfo & setFlags( VULKAN_HPP_NAMESPACE::RenderingFlags flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderingInfo & setRenderArea( VULKAN_HPP_NAMESPACE::Rect2D const & renderArea_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
renderArea = renderArea_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderingInfo & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
layerCount = layerCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderingInfo & setViewMask( uint32_t viewMask_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
viewMask = viewMask_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderingInfo & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
colorAttachmentCount = colorAttachmentCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderingInfo & setPColorAttachments( const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pColorAttachments_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pColorAttachments = pColorAttachments_;
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
RenderingInfo & setColorAttachments(
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo> const & colorAttachments_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
colorAttachmentCount = static_cast<uint32_t>( colorAttachments_.size() );
|
|
pColorAttachments = colorAttachments_.data();
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderingInfo & setPDepthAttachment( const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pDepthAttachment_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pDepthAttachment = pDepthAttachment_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderingInfo &
|
|
setPStencilAttachment( const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pStencilAttachment_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pStencilAttachment = pStencilAttachment_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkRenderingInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkRenderingInfo *>( this );
|
|
}
|
|
|
|
operator VkRenderingInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkRenderingInfo *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
const void * const &,
|
|
VULKAN_HPP_NAMESPACE::RenderingFlags const &,
|
|
VULKAN_HPP_NAMESPACE::Rect2D const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * const &,
|
|
const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * const &,
|
|
const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags, renderArea, layerCount, viewMask, colorAttachmentCount, pColorAttachments, pDepthAttachment, pStencilAttachment );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( RenderingInfo const & ) const = default;
|
|
#else
|
|
bool operator==( RenderingInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( renderArea == rhs.renderArea ) &&
|
|
( layerCount == rhs.layerCount ) && ( viewMask == rhs.viewMask ) && ( colorAttachmentCount == rhs.colorAttachmentCount ) &&
|
|
( pColorAttachments == rhs.pColorAttachments ) && ( pDepthAttachment == rhs.pDepthAttachment ) && ( pStencilAttachment == rhs.pStencilAttachment );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( RenderingInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderingInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::RenderingFlags flags = {};
|
|
VULKAN_HPP_NAMESPACE::Rect2D renderArea = {};
|
|
uint32_t layerCount = {};
|
|
uint32_t viewMask = {};
|
|
uint32_t colorAttachmentCount = {};
|
|
const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pColorAttachments = {};
|
|
const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pDepthAttachment = {};
|
|
const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pStencilAttachment = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eRenderingInfo>
|
|
{
|
|
using Type = RenderingInfo;
|
|
};
|
|
|
|
using RenderingInfoKHR = RenderingInfo;
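  // Illustrative usage sketch, not part of the generated header: begins dynamic rendering on a
  // command buffer `cmd`, assuming `extent` is a vk::Extent2D and `colorAttachment` is a filled
  // vk::RenderingAttachmentInfo:
  //
  //   vk::RenderingInfo renderingInfo{};
  //   renderingInfo.setRenderArea( vk::Rect2D( { 0, 0 }, extent ) )
  //     .setLayerCount( 1 )
  //     .setColorAttachments( colorAttachment );
  //   cmd.beginRendering( renderingInfo );
  //   // ... record draw calls ...
  //   cmd.endRendering();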
struct ResolveImageInfo2
|
|
{
|
|
using NativeType = VkResolveImageInfo2;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eResolveImageInfo2;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ResolveImageInfo2( VULKAN_HPP_NAMESPACE::Image srcImage_ = {},
|
|
VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
|
|
VULKAN_HPP_NAMESPACE::Image dstImage_ = {},
|
|
VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
|
|
uint32_t regionCount_ = {},
|
|
const VULKAN_HPP_NAMESPACE::ImageResolve2 * pRegions_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, srcImage( srcImage_ )
|
|
, srcImageLayout( srcImageLayout_ )
|
|
, dstImage( dstImage_ )
|
|
, dstImageLayout( dstImageLayout_ )
|
|
, regionCount( regionCount_ )
|
|
, pRegions( pRegions_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR ResolveImageInfo2( ResolveImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ResolveImageInfo2( VkResolveImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT : ResolveImageInfo2( *reinterpret_cast<ResolveImageInfo2 const *>( &rhs ) ) {}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
ResolveImageInfo2( VULKAN_HPP_NAMESPACE::Image srcImage_,
|
|
VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_,
|
|
VULKAN_HPP_NAMESPACE::Image dstImage_,
|
|
VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_,
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageResolve2> const & regions_,
|
|
const void * pNext_ = nullptr )
|
|
: pNext( pNext_ )
|
|
, srcImage( srcImage_ )
|
|
, srcImageLayout( srcImageLayout_ )
|
|
, dstImage( dstImage_ )
|
|
, dstImageLayout( dstImageLayout_ )
|
|
, regionCount( static_cast<uint32_t>( regions_.size() ) )
|
|
, pRegions( regions_.data() )
|
|
{
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
ResolveImageInfo2 & operator=( ResolveImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ResolveImageInfo2 & operator=( VkResolveImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ResolveImageInfo2 const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ResolveImageInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ResolveImageInfo2 & setSrcImage( VULKAN_HPP_NAMESPACE::Image srcImage_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
srcImage = srcImage_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ResolveImageInfo2 & setSrcImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
srcImageLayout = srcImageLayout_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ResolveImageInfo2 & setDstImage( VULKAN_HPP_NAMESPACE::Image dstImage_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dstImage = dstImage_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ResolveImageInfo2 & setDstImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dstImageLayout = dstImageLayout_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ResolveImageInfo2 & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
regionCount = regionCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ResolveImageInfo2 & setPRegions( const VULKAN_HPP_NAMESPACE::ImageResolve2 * pRegions_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pRegions = pRegions_;
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
ResolveImageInfo2 &
|
|
setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageResolve2> const & regions_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
regionCount = static_cast<uint32_t>( regions_.size() );
|
|
pRegions = regions_.data();
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkResolveImageInfo2 const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkResolveImageInfo2 *>( this );
|
|
}
|
|
|
|
operator VkResolveImageInfo2 &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkResolveImageInfo2 *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
const void * const &,
|
|
VULKAN_HPP_NAMESPACE::Image const &,
|
|
VULKAN_HPP_NAMESPACE::ImageLayout const &,
|
|
VULKAN_HPP_NAMESPACE::Image const &,
|
|
VULKAN_HPP_NAMESPACE::ImageLayout const &,
|
|
uint32_t const &,
|
|
const VULKAN_HPP_NAMESPACE::ImageResolve2 * const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( ResolveImageInfo2 const & ) const = default;
|
|
#else
|
|
bool operator==( ResolveImageInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcImage == rhs.srcImage ) && ( srcImageLayout == rhs.srcImageLayout ) &&
|
|
( dstImage == rhs.dstImage ) && ( dstImageLayout == rhs.dstImageLayout ) && ( regionCount == rhs.regionCount ) && ( pRegions == rhs.pRegions );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( ResolveImageInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eResolveImageInfo2;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Image srcImage = {};
|
|
VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
|
|
VULKAN_HPP_NAMESPACE::Image dstImage = {};
|
|
VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
|
|
uint32_t regionCount = {};
|
|
const VULKAN_HPP_NAMESPACE::ImageResolve2 * pRegions = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eResolveImageInfo2>
|
|
{
|
|
using Type = ResolveImageInfo2;
|
|
};
|
|
|
|
using ResolveImageInfo2KHR = ResolveImageInfo2;
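  // Illustrative usage sketch, not part of the generated header: resolves a multisampled image
  // into a single-sample image on a command buffer `cmd`, assuming `region` is a filled
  // vk::ImageResolve2 and the images are already in the transfer layouts shown:
  //
  //   vk::ResolveImageInfo2 resolveInfo{};
  //   resolveInfo.setSrcImage( msaaImage )
  //     .setSrcImageLayout( vk::ImageLayout::eTransferSrcOptimal )
  //     .setDstImage( resolvedImage )
  //     .setDstImageLayout( vk::ImageLayout::eTransferDstOptimal )
  //     .setRegions( region );
  //   cmd.resolveImage2( resolveInfo );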
struct SamplerCreateInfo
|
|
{
|
|
using NativeType = VkSamplerCreateInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerCreateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR SamplerCreateInfo( VULKAN_HPP_NAMESPACE::SamplerCreateFlags flags_ = {},
|
|
VULKAN_HPP_NAMESPACE::Filter magFilter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest,
|
|
VULKAN_HPP_NAMESPACE::Filter minFilter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest,
|
|
VULKAN_HPP_NAMESPACE::SamplerMipmapMode mipmapMode_ = VULKAN_HPP_NAMESPACE::SamplerMipmapMode::eNearest,
|
|
VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeU_ = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat,
|
|
VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeV_ = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat,
|
|
VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeW_ = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat,
|
|
float mipLodBias_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 anisotropyEnable_ = {},
|
|
float maxAnisotropy_ = {},
|
|
VULKAN_HPP_NAMESPACE::Bool32 compareEnable_ = {},
|
|
VULKAN_HPP_NAMESPACE::CompareOp compareOp_ = VULKAN_HPP_NAMESPACE::CompareOp::eNever,
|
|
float minLod_ = {},
|
|
float maxLod_ = {},
|
|
VULKAN_HPP_NAMESPACE::BorderColor borderColor_ = VULKAN_HPP_NAMESPACE::BorderColor::eFloatTransparentBlack,
|
|
VULKAN_HPP_NAMESPACE::Bool32 unnormalizedCoordinates_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, flags( flags_ )
|
|
, magFilter( magFilter_ )
|
|
, minFilter( minFilter_ )
|
|
, mipmapMode( mipmapMode_ )
|
|
, addressModeU( addressModeU_ )
|
|
, addressModeV( addressModeV_ )
|
|
, addressModeW( addressModeW_ )
|
|
, mipLodBias( mipLodBias_ )
|
|
, anisotropyEnable( anisotropyEnable_ )
|
|
, maxAnisotropy( maxAnisotropy_ )
|
|
, compareEnable( compareEnable_ )
|
|
, compareOp( compareOp_ )
|
|
, minLod( minLod_ )
|
|
, maxLod( maxLod_ )
|
|
, borderColor( borderColor_ )
|
|
, unnormalizedCoordinates( unnormalizedCoordinates_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR SamplerCreateInfo( SamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SamplerCreateInfo( VkSamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : SamplerCreateInfo( *reinterpret_cast<SamplerCreateInfo const *>( &rhs ) ) {}
|
|
|
|
SamplerCreateInfo & operator=( SamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
SamplerCreateInfo & operator=( VkSamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SamplerCreateInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::SamplerCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setMagFilter( VULKAN_HPP_NAMESPACE::Filter magFilter_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
magFilter = magFilter_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setMinFilter( VULKAN_HPP_NAMESPACE::Filter minFilter_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
minFilter = minFilter_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setMipmapMode( VULKAN_HPP_NAMESPACE::SamplerMipmapMode mipmapMode_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
mipmapMode = mipmapMode_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setAddressModeU( VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeU_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
addressModeU = addressModeU_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setAddressModeV( VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeV_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
addressModeV = addressModeV_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setAddressModeW( VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeW_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
addressModeW = addressModeW_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setMipLodBias( float mipLodBias_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
mipLodBias = mipLodBias_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setAnisotropyEnable( VULKAN_HPP_NAMESPACE::Bool32 anisotropyEnable_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
anisotropyEnable = anisotropyEnable_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setMaxAnisotropy( float maxAnisotropy_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
maxAnisotropy = maxAnisotropy_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setCompareEnable( VULKAN_HPP_NAMESPACE::Bool32 compareEnable_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
compareEnable = compareEnable_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setCompareOp( VULKAN_HPP_NAMESPACE::CompareOp compareOp_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
compareOp = compareOp_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setMinLod( float minLod_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
minLod = minLod_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setMaxLod( float maxLod_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
maxLod = maxLod_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setBorderColor( VULKAN_HPP_NAMESPACE::BorderColor borderColor_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
borderColor = borderColor_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setUnnormalizedCoordinates( VULKAN_HPP_NAMESPACE::Bool32 unnormalizedCoordinates_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
unnormalizedCoordinates = unnormalizedCoordinates_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkSamplerCreateInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkSamplerCreateInfo *>( this );
|
|
}
|
|
|
|
operator VkSamplerCreateInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkSamplerCreateInfo *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
const void * const &,
|
|
VULKAN_HPP_NAMESPACE::SamplerCreateFlags const &,
|
|
VULKAN_HPP_NAMESPACE::Filter const &,
|
|
VULKAN_HPP_NAMESPACE::Filter const &,
|
|
VULKAN_HPP_NAMESPACE::SamplerMipmapMode const &,
|
|
VULKAN_HPP_NAMESPACE::SamplerAddressMode const &,
|
|
VULKAN_HPP_NAMESPACE::SamplerAddressMode const &,
|
|
VULKAN_HPP_NAMESPACE::SamplerAddressMode const &,
|
|
float const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
float const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::CompareOp const &,
|
|
float const &,
|
|
float const &,
|
|
VULKAN_HPP_NAMESPACE::BorderColor const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType,
|
|
pNext,
|
|
flags,
|
|
magFilter,
|
|
minFilter,
|
|
mipmapMode,
|
|
addressModeU,
|
|
addressModeV,
|
|
addressModeW,
|
|
mipLodBias,
|
|
anisotropyEnable,
|
|
maxAnisotropy,
|
|
compareEnable,
|
|
compareOp,
|
|
minLod,
|
|
maxLod,
|
|
borderColor,
|
|
unnormalizedCoordinates );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( SamplerCreateInfo const & ) const = default;
|
|
#else
|
|
bool operator==( SamplerCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( magFilter == rhs.magFilter ) && ( minFilter == rhs.minFilter ) &&
|
|
( mipmapMode == rhs.mipmapMode ) && ( addressModeU == rhs.addressModeU ) && ( addressModeV == rhs.addressModeV ) &&
|
|
( addressModeW == rhs.addressModeW ) && ( mipLodBias == rhs.mipLodBias ) && ( anisotropyEnable == rhs.anisotropyEnable ) &&
|
|
( maxAnisotropy == rhs.maxAnisotropy ) && ( compareEnable == rhs.compareEnable ) && ( compareOp == rhs.compareOp ) && ( minLod == rhs.minLod ) &&
|
|
( maxLod == rhs.maxLod ) && ( borderColor == rhs.borderColor ) && ( unnormalizedCoordinates == rhs.unnormalizedCoordinates );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( SamplerCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerCreateInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::SamplerCreateFlags flags = {};
|
|
VULKAN_HPP_NAMESPACE::Filter magFilter = VULKAN_HPP_NAMESPACE::Filter::eNearest;
|
|
VULKAN_HPP_NAMESPACE::Filter minFilter = VULKAN_HPP_NAMESPACE::Filter::eNearest;
|
|
VULKAN_HPP_NAMESPACE::SamplerMipmapMode mipmapMode = VULKAN_HPP_NAMESPACE::SamplerMipmapMode::eNearest;
|
|
VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeU = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat;
|
|
VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeV = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat;
|
|
VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeW = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat;
|
|
float mipLodBias = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 anisotropyEnable = {};
|
|
float maxAnisotropy = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 compareEnable = {};
|
|
VULKAN_HPP_NAMESPACE::CompareOp compareOp = VULKAN_HPP_NAMESPACE::CompareOp::eNever;
|
|
float minLod = {};
|
|
float maxLod = {};
|
|
VULKAN_HPP_NAMESPACE::BorderColor borderColor = VULKAN_HPP_NAMESPACE::BorderColor::eFloatTransparentBlack;
|
|
VULKAN_HPP_NAMESPACE::Bool32 unnormalizedCoordinates = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eSamplerCreateInfo>
|
|
{
|
|
using Type = SamplerCreateInfo;
|
|
};
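  // Illustrative usage sketch, not part of the generated header: creates a trilinear repeat
  // sampler with the default configuration, assuming `device` is a valid vk::Device:
  //
  //   vk::SamplerCreateInfo samplerInfo{};
  //   samplerInfo.setMagFilter( vk::Filter::eLinear )
  //     .setMinFilter( vk::Filter::eLinear )
  //     .setMipmapMode( vk::SamplerMipmapMode::eLinear )
  //     .setAddressModeU( vk::SamplerAddressMode::eRepeat )
  //     .setAddressModeV( vk::SamplerAddressMode::eRepeat )
  //     .setMaxLod( VK_LOD_CLAMP_NONE );
  //   vk::Sampler sampler = device.createSampler( samplerInfo );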
struct SamplerCustomBorderColorCreateInfoEXT
|
|
{
|
|
using NativeType = VkSamplerCustomBorderColorCreateInfoEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerCustomBorderColorCreateInfoEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR_14 SamplerCustomBorderColorCreateInfoEXT( VULKAN_HPP_NAMESPACE::ClearColorValue customBorderColor_ = {},
|
|
VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, customBorderColor( customBorderColor_ )
|
|
, format( format_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SamplerCustomBorderColorCreateInfoEXT( SamplerCustomBorderColorCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SamplerCustomBorderColorCreateInfoEXT( VkSamplerCustomBorderColorCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: SamplerCustomBorderColorCreateInfoEXT( *reinterpret_cast<SamplerCustomBorderColorCreateInfoEXT const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
SamplerCustomBorderColorCreateInfoEXT & operator=( SamplerCustomBorderColorCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
SamplerCustomBorderColorCreateInfoEXT & operator=( VkSamplerCustomBorderColorCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SamplerCustomBorderColorCreateInfoEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 SamplerCustomBorderColorCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SamplerCustomBorderColorCreateInfoEXT &
|
|
setCustomBorderColor( VULKAN_HPP_NAMESPACE::ClearColorValue const & customBorderColor_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
customBorderColor = customBorderColor_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SamplerCustomBorderColorCreateInfoEXT & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
format = format_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkSamplerCustomBorderColorCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkSamplerCustomBorderColorCreateInfoEXT *>( this );
|
|
}
|
|
|
|
operator VkSamplerCustomBorderColorCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkSamplerCustomBorderColorCreateInfoEXT *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
const void * const &,
|
|
VULKAN_HPP_NAMESPACE::ClearColorValue const &,
|
|
VULKAN_HPP_NAMESPACE::Format const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, customBorderColor, format );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerCustomBorderColorCreateInfoEXT;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::ClearColorValue customBorderColor = {};
|
|
VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eSamplerCustomBorderColorCreateInfoEXT>
|
|
{
|
|
using Type = SamplerCustomBorderColorCreateInfoEXT;
|
|
};
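  // Illustrative usage sketch, not part of the generated header: with VK_EXT_custom_border_color
  // enabled, this struct is chained into vk::SamplerCreateInfo::pNext together with the
  // eFloatCustomEXT border color:
  //
  //   vk::SamplerCustomBorderColorCreateInfoEXT customBorder{};
  //   customBorder.setCustomBorderColor( vk::ClearColorValue( std::array<float, 4>{ { 1.0f, 0.0f, 0.0f, 1.0f } } ) )
  //     .setFormat( vk::Format::eR8G8B8A8Unorm );
  //   samplerInfo.setBorderColor( vk::BorderColor::eFloatCustomEXT ).setPNext( &customBorder );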
struct SamplerReductionModeCreateInfo
|
|
{
|
|
using NativeType = VkSamplerReductionModeCreateInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerReductionModeCreateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR
|
|
SamplerReductionModeCreateInfo( VULKAN_HPP_NAMESPACE::SamplerReductionMode reductionMode_ = VULKAN_HPP_NAMESPACE::SamplerReductionMode::eWeightedAverage,
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, reductionMode( reductionMode_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR SamplerReductionModeCreateInfo( SamplerReductionModeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SamplerReductionModeCreateInfo( VkSamplerReductionModeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: SamplerReductionModeCreateInfo( *reinterpret_cast<SamplerReductionModeCreateInfo const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
SamplerReductionModeCreateInfo & operator=( SamplerReductionModeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
SamplerReductionModeCreateInfo & operator=( VkSamplerReductionModeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SamplerReductionModeCreateInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 SamplerReductionModeCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SamplerReductionModeCreateInfo & setReductionMode( VULKAN_HPP_NAMESPACE::SamplerReductionMode reductionMode_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
reductionMode = reductionMode_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkSamplerReductionModeCreateInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkSamplerReductionModeCreateInfo *>( this );
|
|
}
|
|
|
|
operator VkSamplerReductionModeCreateInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkSamplerReductionModeCreateInfo *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::SamplerReductionMode const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, reductionMode );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( SamplerReductionModeCreateInfo const & ) const = default;
|
|
#else
|
|
bool operator==( SamplerReductionModeCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( reductionMode == rhs.reductionMode );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( SamplerReductionModeCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerReductionModeCreateInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::SamplerReductionMode reductionMode = VULKAN_HPP_NAMESPACE::SamplerReductionMode::eWeightedAverage;
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eSamplerReductionModeCreateInfo>
|
|
{
|
|
using Type = SamplerReductionModeCreateInfo;
|
|
};
|
|
|
|
using SamplerReductionModeCreateInfoEXT = SamplerReductionModeCreateInfo;
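
  // Usage note (not generated; a minimal sketch, assuming the default "vk" namespace and a valid "device"):
  // SamplerReductionModeCreateInfo extends SamplerCreateInfo::pNext to select min/max filtering instead of
  // the default weighted-average reduction.
  //
  //   vk::SamplerReductionModeCreateInfo reduction( vk::SamplerReductionMode::eMax );
  //   vk::SamplerCreateInfo samplerInfo{};
  //   samplerInfo.magFilter = vk::Filter::eLinear;
  //   samplerInfo.minFilter = vk::Filter::eLinear;
  //   samplerInfo.pNext     = &reduction;
  //   vk::Sampler sampler = device.createSampler( samplerInfo );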
|
|
|
|
struct SamplerYcbcrConversionCreateInfo
|
|
{
|
|
using NativeType = VkSamplerYcbcrConversionCreateInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerYcbcrConversionCreateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionCreateInfo(
|
|
VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
|
|
VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion ycbcrModel_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity,
|
|
VULKAN_HPP_NAMESPACE::SamplerYcbcrRange ycbcrRange_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull,
|
|
VULKAN_HPP_NAMESPACE::ComponentMapping components_ = {},
|
|
VULKAN_HPP_NAMESPACE::ChromaLocation xChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven,
|
|
VULKAN_HPP_NAMESPACE::ChromaLocation yChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven,
|
|
VULKAN_HPP_NAMESPACE::Filter chromaFilter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest,
|
|
VULKAN_HPP_NAMESPACE::Bool32 forceExplicitReconstruction_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, format( format_ )
|
|
, ycbcrModel( ycbcrModel_ )
|
|
, ycbcrRange( ycbcrRange_ )
|
|
, components( components_ )
|
|
, xChromaOffset( xChromaOffset_ )
|
|
, yChromaOffset( yChromaOffset_ )
|
|
, chromaFilter( chromaFilter_ )
|
|
, forceExplicitReconstruction( forceExplicitReconstruction_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionCreateInfo( SamplerYcbcrConversionCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SamplerYcbcrConversionCreateInfo( VkSamplerYcbcrConversionCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: SamplerYcbcrConversionCreateInfo( *reinterpret_cast<SamplerYcbcrConversionCreateInfo const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
SamplerYcbcrConversionCreateInfo & operator=( SamplerYcbcrConversionCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
SamplerYcbcrConversionCreateInfo & operator=( VkSamplerYcbcrConversionCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
format = format_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo &
|
|
setYcbcrModel( VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion ycbcrModel_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
ycbcrModel = ycbcrModel_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo & setYcbcrRange( VULKAN_HPP_NAMESPACE::SamplerYcbcrRange ycbcrRange_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
ycbcrRange = ycbcrRange_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo & setComponents( VULKAN_HPP_NAMESPACE::ComponentMapping const & components_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
components = components_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo & setXChromaOffset( VULKAN_HPP_NAMESPACE::ChromaLocation xChromaOffset_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
xChromaOffset = xChromaOffset_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo & setYChromaOffset( VULKAN_HPP_NAMESPACE::ChromaLocation yChromaOffset_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
yChromaOffset = yChromaOffset_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo & setChromaFilter( VULKAN_HPP_NAMESPACE::Filter chromaFilter_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
chromaFilter = chromaFilter_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo &
|
|
setForceExplicitReconstruction( VULKAN_HPP_NAMESPACE::Bool32 forceExplicitReconstruction_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
forceExplicitReconstruction = forceExplicitReconstruction_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkSamplerYcbcrConversionCreateInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( this );
|
|
}
|
|
|
|
operator VkSamplerYcbcrConversionCreateInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkSamplerYcbcrConversionCreateInfo *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
const void * const &,
|
|
VULKAN_HPP_NAMESPACE::Format const &,
|
|
VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion const &,
|
|
VULKAN_HPP_NAMESPACE::SamplerYcbcrRange const &,
|
|
VULKAN_HPP_NAMESPACE::ComponentMapping const &,
|
|
VULKAN_HPP_NAMESPACE::ChromaLocation const &,
|
|
VULKAN_HPP_NAMESPACE::ChromaLocation const &,
|
|
VULKAN_HPP_NAMESPACE::Filter const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, format, ycbcrModel, ycbcrRange, components, xChromaOffset, yChromaOffset, chromaFilter, forceExplicitReconstruction );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( SamplerYcbcrConversionCreateInfo const & ) const = default;
|
|
#else
|
|
bool operator==( SamplerYcbcrConversionCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( format == rhs.format ) && ( ycbcrModel == rhs.ycbcrModel ) &&
|
|
( ycbcrRange == rhs.ycbcrRange ) && ( components == rhs.components ) && ( xChromaOffset == rhs.xChromaOffset ) &&
|
|
( yChromaOffset == rhs.yChromaOffset ) && ( chromaFilter == rhs.chromaFilter ) &&
|
|
( forceExplicitReconstruction == rhs.forceExplicitReconstruction );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( SamplerYcbcrConversionCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerYcbcrConversionCreateInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
|
|
VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion ycbcrModel = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity;
|
|
VULKAN_HPP_NAMESPACE::SamplerYcbcrRange ycbcrRange = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull;
|
|
VULKAN_HPP_NAMESPACE::ComponentMapping components = {};
|
|
VULKAN_HPP_NAMESPACE::ChromaLocation xChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven;
|
|
VULKAN_HPP_NAMESPACE::ChromaLocation yChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven;
|
|
VULKAN_HPP_NAMESPACE::Filter chromaFilter = VULKAN_HPP_NAMESPACE::Filter::eNearest;
|
|
VULKAN_HPP_NAMESPACE::Bool32 forceExplicitReconstruction = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eSamplerYcbcrConversionCreateInfo>
|
|
{
|
|
using Type = SamplerYcbcrConversionCreateInfo;
|
|
};
|
|
|
|
using SamplerYcbcrConversionCreateInfoKHR = SamplerYcbcrConversionCreateInfo;
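
  // Usage note (not generated; a minimal sketch, assuming the default "vk" namespace and a valid "device"):
  // a SamplerYcbcrConversion object is created from this structure and then referenced through
  // SamplerYcbcrConversionInfo in the pNext chains of SamplerCreateInfo and ImageViewCreateInfo.
  //
  //   vk::SamplerYcbcrConversionCreateInfo conversionInfo(
  //     vk::Format::eG8B8R82Plane420Unorm,
  //     vk::SamplerYcbcrModelConversion::eYcbcr709,
  //     vk::SamplerYcbcrRange::eItuNarrow,
  //     {},                                           // identity component mapping
  //     vk::ChromaLocation::eCositedEven,
  //     vk::ChromaLocation::eCositedEven,
  //     vk::Filter::eLinear,
  //     VK_FALSE );
  //   vk::SamplerYcbcrConversion conversion = device.createSamplerYcbcrConversion( conversionInfo );
  //
  //   vk::SamplerYcbcrConversionInfo conversionRef( conversion );  // chain into SamplerCreateInfo / ImageViewCreateInfo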
|
|
|
|
struct SamplerYcbcrConversionImageFormatProperties
|
|
{
|
|
using NativeType = VkSamplerYcbcrConversionImageFormatProperties;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerYcbcrConversionImageFormatProperties;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionImageFormatProperties( uint32_t combinedImageSamplerDescriptorCount_ = {},
|
|
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, combinedImageSamplerDescriptorCount( combinedImageSamplerDescriptorCount_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionImageFormatProperties( SamplerYcbcrConversionImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SamplerYcbcrConversionImageFormatProperties( VkSamplerYcbcrConversionImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: SamplerYcbcrConversionImageFormatProperties( *reinterpret_cast<SamplerYcbcrConversionImageFormatProperties const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
SamplerYcbcrConversionImageFormatProperties & operator=( SamplerYcbcrConversionImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
SamplerYcbcrConversionImageFormatProperties & operator=( VkSamplerYcbcrConversionImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionImageFormatProperties const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
operator VkSamplerYcbcrConversionImageFormatProperties const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkSamplerYcbcrConversionImageFormatProperties *>( this );
|
|
}
|
|
|
|
operator VkSamplerYcbcrConversionImageFormatProperties &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkSamplerYcbcrConversionImageFormatProperties *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, combinedImageSamplerDescriptorCount );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( SamplerYcbcrConversionImageFormatProperties const & ) const = default;
|
|
#else
|
|
bool operator==( SamplerYcbcrConversionImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( combinedImageSamplerDescriptorCount == rhs.combinedImageSamplerDescriptorCount );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( SamplerYcbcrConversionImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerYcbcrConversionImageFormatProperties;
|
|
void * pNext = {};
|
|
uint32_t combinedImageSamplerDescriptorCount = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eSamplerYcbcrConversionImageFormatProperties>
|
|
{
|
|
using Type = SamplerYcbcrConversionImageFormatProperties;
|
|
};
|
|
|
|
using SamplerYcbcrConversionImageFormatPropertiesKHR = SamplerYcbcrConversionImageFormatProperties;
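
  // Usage note (not generated; a minimal sketch, assuming the default "vk" namespace and a valid "physicalDevice"):
  // this is an output structure; chain it behind ImageFormatProperties2 when querying a multi-planar format to
  // learn how many combined image sampler descriptors such an image consumes.
  //
  //   vk::PhysicalDeviceImageFormatInfo2 formatInfo( vk::Format::eG8B8R82Plane420Unorm,
  //                                                  vk::ImageType::e2D,
  //                                                  vk::ImageTiling::eOptimal,
  //                                                  vk::ImageUsageFlagBits::eSampled );
  //   auto chain = physicalDevice.getImageFormatProperties2<vk::ImageFormatProperties2,
  //                                                         vk::SamplerYcbcrConversionImageFormatProperties>( formatInfo );
  //   uint32_t count = chain.get<vk::SamplerYcbcrConversionImageFormatProperties>().combinedImageSamplerDescriptorCount;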
|
|
|
|
struct SamplerYcbcrConversionInfo
|
|
{
|
|
using NativeType = VkSamplerYcbcrConversionInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerYcbcrConversionInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionInfo( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion conversion_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, conversion( conversion_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionInfo( SamplerYcbcrConversionInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SamplerYcbcrConversionInfo( VkSamplerYcbcrConversionInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: SamplerYcbcrConversionInfo( *reinterpret_cast<SamplerYcbcrConversionInfo const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
SamplerYcbcrConversionInfo & operator=( SamplerYcbcrConversionInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
SamplerYcbcrConversionInfo & operator=( VkSamplerYcbcrConversionInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionInfo & setConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion conversion_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
conversion = conversion_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkSamplerYcbcrConversionInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkSamplerYcbcrConversionInfo *>( this );
|
|
}
|
|
|
|
operator VkSamplerYcbcrConversionInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkSamplerYcbcrConversionInfo *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, conversion );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( SamplerYcbcrConversionInfo const & ) const = default;
|
|
#else
|
|
bool operator==( SamplerYcbcrConversionInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( conversion == rhs.conversion );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( SamplerYcbcrConversionInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerYcbcrConversionInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion conversion = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eSamplerYcbcrConversionInfo>
|
|
{
|
|
using Type = SamplerYcbcrConversionInfo;
|
|
};
|
|
|
|
using SamplerYcbcrConversionInfoKHR = SamplerYcbcrConversionInfo;
|
|
|
|
#if defined( VK_USE_PLATFORM_SCI )
|
|
struct SciSyncAttributesInfoNV
|
|
{
|
|
using NativeType = VkSciSyncAttributesInfoNV;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSciSyncAttributesInfoNV;
|
|
|
|
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR
|
|
SciSyncAttributesInfoNV( VULKAN_HPP_NAMESPACE::SciSyncClientTypeNV clientType_ = VULKAN_HPP_NAMESPACE::SciSyncClientTypeNV::eSignaler,
|
|
VULKAN_HPP_NAMESPACE::SciSyncPrimitiveTypeNV primitiveType_ = VULKAN_HPP_NAMESPACE::SciSyncPrimitiveTypeNV::eFence,
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, clientType( clientType_ )
|
|
, primitiveType( primitiveType_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR SciSyncAttributesInfoNV( SciSyncAttributesInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SciSyncAttributesInfoNV( VkSciSyncAttributesInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: SciSyncAttributesInfoNV( *reinterpret_cast<SciSyncAttributesInfoNV const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
SciSyncAttributesInfoNV & operator=( SciSyncAttributesInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
SciSyncAttributesInfoNV & operator=( VkSciSyncAttributesInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SciSyncAttributesInfoNV const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 SciSyncAttributesInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SciSyncAttributesInfoNV & setClientType( VULKAN_HPP_NAMESPACE::SciSyncClientTypeNV clientType_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
clientType = clientType_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SciSyncAttributesInfoNV & setPrimitiveType( VULKAN_HPP_NAMESPACE::SciSyncPrimitiveTypeNV primitiveType_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
primitiveType = primitiveType_;
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkSciSyncAttributesInfoNV const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkSciSyncAttributesInfoNV *>( this );
|
|
}
|
|
|
|
operator VkSciSyncAttributesInfoNV &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkSciSyncAttributesInfoNV *>( this );
|
|
}
|
|
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
const void * const &,
|
|
VULKAN_HPP_NAMESPACE::SciSyncClientTypeNV const &,
|
|
VULKAN_HPP_NAMESPACE::SciSyncPrimitiveTypeNV const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, clientType, primitiveType );
|
|
}
|
|
# endif
|
|
|
|
# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( SciSyncAttributesInfoNV const & ) const = default;
|
|
# else
|
|
bool operator==( SciSyncAttributesInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( clientType == rhs.clientType ) && ( primitiveType == rhs.primitiveType );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( SciSyncAttributesInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
# endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSciSyncAttributesInfoNV;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::SciSyncClientTypeNV clientType = VULKAN_HPP_NAMESPACE::SciSyncClientTypeNV::eSignaler;
|
|
VULKAN_HPP_NAMESPACE::SciSyncPrimitiveTypeNV primitiveType = VULKAN_HPP_NAMESPACE::SciSyncPrimitiveTypeNV::eFence;
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eSciSyncAttributesInfoNV>
|
|
{
|
|
using Type = SciSyncAttributesInfoNV;
|
|
};
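
  // Note (not generated): SciSyncAttributesInfoNV describes the client role (signaler / waiter) and the
  // NvSciSync primitive type when filling NvSciSync attribute lists for VK_NV_external_sci_sync, e.g. via
  // vkGetPhysicalDeviceSciSyncAttributesNV.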
|
|
#endif /*VK_USE_PLATFORM_SCI*/
|
|
|
|
#if defined( VK_USE_PLATFORM_SCREEN_QNX )
|
|
struct ScreenBufferFormatPropertiesQNX
|
|
{
|
|
using NativeType = VkScreenBufferFormatPropertiesQNX;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eScreenBufferFormatPropertiesQNX;
|
|
|
|
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ScreenBufferFormatPropertiesQNX(
|
|
VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
|
|
uint64_t externalFormat_ = {},
|
|
uint64_t screenUsage_ = {},
|
|
VULKAN_HPP_NAMESPACE::FormatFeatureFlags formatFeatures_ = {},
|
|
VULKAN_HPP_NAMESPACE::ComponentMapping samplerYcbcrConversionComponents_ = {},
|
|
VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity,
|
|
VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull,
|
|
VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven,
|
|
VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven,
|
|
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, format( format_ )
|
|
, externalFormat( externalFormat_ )
|
|
, screenUsage( screenUsage_ )
|
|
, formatFeatures( formatFeatures_ )
|
|
, samplerYcbcrConversionComponents( samplerYcbcrConversionComponents_ )
|
|
, suggestedYcbcrModel( suggestedYcbcrModel_ )
|
|
, suggestedYcbcrRange( suggestedYcbcrRange_ )
|
|
, suggestedXChromaOffset( suggestedXChromaOffset_ )
|
|
, suggestedYChromaOffset( suggestedYChromaOffset_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR ScreenBufferFormatPropertiesQNX( ScreenBufferFormatPropertiesQNX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ScreenBufferFormatPropertiesQNX( VkScreenBufferFormatPropertiesQNX const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ScreenBufferFormatPropertiesQNX( *reinterpret_cast<ScreenBufferFormatPropertiesQNX const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
ScreenBufferFormatPropertiesQNX & operator=( ScreenBufferFormatPropertiesQNX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ScreenBufferFormatPropertiesQNX & operator=( VkScreenBufferFormatPropertiesQNX const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ScreenBufferFormatPropertiesQNX const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
operator VkScreenBufferFormatPropertiesQNX const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkScreenBufferFormatPropertiesQNX *>( this );
|
|
}
|
|
|
|
operator VkScreenBufferFormatPropertiesQNX &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkScreenBufferFormatPropertiesQNX *>( this );
|
|
}
|
|
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
void * const &,
|
|
VULKAN_HPP_NAMESPACE::Format const &,
|
|
uint64_t const &,
|
|
uint64_t const &,
|
|
VULKAN_HPP_NAMESPACE::FormatFeatureFlags const &,
|
|
VULKAN_HPP_NAMESPACE::ComponentMapping const &,
|
|
VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion const &,
|
|
VULKAN_HPP_NAMESPACE::SamplerYcbcrRange const &,
|
|
VULKAN_HPP_NAMESPACE::ChromaLocation const &,
|
|
VULKAN_HPP_NAMESPACE::ChromaLocation const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType,
|
|
pNext,
|
|
format,
|
|
externalFormat,
|
|
screenUsage,
|
|
formatFeatures,
|
|
samplerYcbcrConversionComponents,
|
|
suggestedYcbcrModel,
|
|
suggestedYcbcrRange,
|
|
suggestedXChromaOffset,
|
|
suggestedYChromaOffset );
|
|
}
|
|
# endif
|
|
|
|
# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( ScreenBufferFormatPropertiesQNX const & ) const = default;
|
|
# else
|
|
bool operator==( ScreenBufferFormatPropertiesQNX const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( format == rhs.format ) && ( externalFormat == rhs.externalFormat ) &&
|
|
( screenUsage == rhs.screenUsage ) && ( formatFeatures == rhs.formatFeatures ) &&
|
|
( samplerYcbcrConversionComponents == rhs.samplerYcbcrConversionComponents ) && ( suggestedYcbcrModel == rhs.suggestedYcbcrModel ) &&
|
|
( suggestedYcbcrRange == rhs.suggestedYcbcrRange ) && ( suggestedXChromaOffset == rhs.suggestedXChromaOffset ) &&
|
|
( suggestedYChromaOffset == rhs.suggestedYChromaOffset );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( ScreenBufferFormatPropertiesQNX const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
# endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eScreenBufferFormatPropertiesQNX;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
|
|
uint64_t externalFormat = {};
|
|
uint64_t screenUsage = {};
|
|
VULKAN_HPP_NAMESPACE::FormatFeatureFlags formatFeatures = {};
|
|
VULKAN_HPP_NAMESPACE::ComponentMapping samplerYcbcrConversionComponents = {};
|
|
VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity;
|
|
VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull;
|
|
VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven;
|
|
VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven;
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eScreenBufferFormatPropertiesQNX>
|
|
{
|
|
using Type = ScreenBufferFormatPropertiesQNX;
|
|
};
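
  // Note (not generated): ScreenBufferFormatPropertiesQNX is an output structure; it can be chained behind
  // ScreenBufferPropertiesQNX when querying a QNX Screen buffer (VK_QNX_external_memory_screen_buffer) to
  // obtain the buffer's format and suggested Y'CbCr conversion settings.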
|
|
#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
|
|
|
|
#if defined( VK_USE_PLATFORM_SCREEN_QNX )
|
|
struct ScreenBufferPropertiesQNX
|
|
{
|
|
using NativeType = VkScreenBufferPropertiesQNX;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eScreenBufferPropertiesQNX;
|
|
|
|
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ScreenBufferPropertiesQNX( VULKAN_HPP_NAMESPACE::DeviceSize allocationSize_ = {},
|
|
uint32_t memoryTypeBits_ = {},
|
|
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, allocationSize( allocationSize_ )
|
|
, memoryTypeBits( memoryTypeBits_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR ScreenBufferPropertiesQNX( ScreenBufferPropertiesQNX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ScreenBufferPropertiesQNX( VkScreenBufferPropertiesQNX const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ScreenBufferPropertiesQNX( *reinterpret_cast<ScreenBufferPropertiesQNX const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
ScreenBufferPropertiesQNX & operator=( ScreenBufferPropertiesQNX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ScreenBufferPropertiesQNX & operator=( VkScreenBufferPropertiesQNX const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
operator VkScreenBufferPropertiesQNX const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkScreenBufferPropertiesQNX *>( this );
|
|
}
|
|
|
|
operator VkScreenBufferPropertiesQNX &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkScreenBufferPropertiesQNX *>( this );
|
|
}
|
|
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, uint32_t const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, allocationSize, memoryTypeBits );
|
|
}
|
|
# endif
|
|
|
|
# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( ScreenBufferPropertiesQNX const & ) const = default;
|
|
# else
|
|
bool operator==( ScreenBufferPropertiesQNX const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( allocationSize == rhs.allocationSize ) && ( memoryTypeBits == rhs.memoryTypeBits );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( ScreenBufferPropertiesQNX const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
# endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eScreenBufferPropertiesQNX;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize allocationSize = {};
|
|
uint32_t memoryTypeBits = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eScreenBufferPropertiesQNX>
|
|
{
|
|
using Type = ScreenBufferPropertiesQNX;
|
|
};
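
  // Note (not generated): ScreenBufferPropertiesQNX is filled by vkGetScreenBufferPropertiesQNX
  // (Device::getScreenBufferPropertiesQNX in this binding) and reports the allocation size and compatible
  // memory types for importing a QNX Screen buffer as device memory.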
|
|
#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
|
|
|
|
struct SemaphoreCreateInfo
|
|
{
|
|
using NativeType = VkSemaphoreCreateInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreCreateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR SemaphoreCreateInfo( VULKAN_HPP_NAMESPACE::SemaphoreCreateFlags flags_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, flags( flags_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR SemaphoreCreateInfo( SemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SemaphoreCreateInfo( VkSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : SemaphoreCreateInfo( *reinterpret_cast<SemaphoreCreateInfo const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
SemaphoreCreateInfo & operator=( SemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
SemaphoreCreateInfo & operator=( VkSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 SemaphoreCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SemaphoreCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::SemaphoreCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkSemaphoreCreateInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkSemaphoreCreateInfo *>( this );
|
|
}
|
|
|
|
operator VkSemaphoreCreateInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkSemaphoreCreateInfo *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::SemaphoreCreateFlags const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( SemaphoreCreateInfo const & ) const = default;
|
|
#else
|
|
bool operator==( SemaphoreCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( SemaphoreCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreCreateInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::SemaphoreCreateFlags flags = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eSemaphoreCreateInfo>
|
|
{
|
|
using Type = SemaphoreCreateInfo;
|
|
};
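
  // Usage note (not generated; a minimal sketch, assuming the default "vk" namespace, a valid "device", and
  // smart handles enabled): SemaphoreCreateInfo needs no members for a plain binary semaphore; timeline
  // semaphores additionally chain a SemaphoreTypeCreateInfo (defined further below) into pNext.
  //
  //   vk::SemaphoreCreateInfo semaphoreInfo{};
  //   vk::UniqueSemaphore semaphore = device.createSemaphoreUnique( semaphoreInfo );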
|
|
|
|
struct SemaphoreGetFdInfoKHR
|
|
{
|
|
using NativeType = VkSemaphoreGetFdInfoKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreGetFdInfoKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR SemaphoreGetFdInfoKHR(
|
|
VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {},
|
|
VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd,
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, semaphore( semaphore_ )
|
|
, handleType( handleType_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR SemaphoreGetFdInfoKHR( SemaphoreGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SemaphoreGetFdInfoKHR( VkSemaphoreGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: SemaphoreGetFdInfoKHR( *reinterpret_cast<SemaphoreGetFdInfoKHR const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
SemaphoreGetFdInfoKHR & operator=( SemaphoreGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
SemaphoreGetFdInfoKHR & operator=( VkSemaphoreGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 SemaphoreGetFdInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SemaphoreGetFdInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
semaphore = semaphore_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SemaphoreGetFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
handleType = handleType_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkSemaphoreGetFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkSemaphoreGetFdInfoKHR *>( this );
|
|
}
|
|
|
|
operator VkSemaphoreGetFdInfoKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkSemaphoreGetFdInfoKHR *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
const void * const &,
|
|
VULKAN_HPP_NAMESPACE::Semaphore const &,
|
|
VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, semaphore, handleType );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( SemaphoreGetFdInfoKHR const & ) const = default;
|
|
#else
|
|
bool operator==( SemaphoreGetFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) && ( handleType == rhs.handleType );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( SemaphoreGetFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreGetFdInfoKHR;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Semaphore semaphore = {};
|
|
VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd;
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eSemaphoreGetFdInfoKHR>
|
|
{
|
|
using Type = SemaphoreGetFdInfoKHR;
|
|
};
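
  // Usage note (not generated; a minimal sketch, assuming the default "vk" namespace, a valid "device", an
  // exportable "semaphore", and VK_KHR_external_semaphore_fd enabled): exports a POSIX file descriptor
  // payload from a semaphore; ownership of the returned fd transfers to the caller.
  //
  //   vk::SemaphoreGetFdInfoKHR getFdInfo( semaphore, vk::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd );
  //   int fd = device.getSemaphoreFdKHR( getFdInfo );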
|
|
|
|
#if defined( VK_USE_PLATFORM_SCI )
|
|
struct SemaphoreGetSciSyncInfoNV
|
|
{
|
|
using NativeType = VkSemaphoreGetSciSyncInfoNV;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreGetSciSyncInfoNV;
|
|
|
|
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR SemaphoreGetSciSyncInfoNV(
|
|
VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {},
|
|
VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd,
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, semaphore( semaphore_ )
|
|
, handleType( handleType_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR SemaphoreGetSciSyncInfoNV( SemaphoreGetSciSyncInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SemaphoreGetSciSyncInfoNV( VkSemaphoreGetSciSyncInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: SemaphoreGetSciSyncInfoNV( *reinterpret_cast<SemaphoreGetSciSyncInfoNV const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
SemaphoreGetSciSyncInfoNV & operator=( SemaphoreGetSciSyncInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
SemaphoreGetSciSyncInfoNV & operator=( VkSemaphoreGetSciSyncInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SemaphoreGetSciSyncInfoNV const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 SemaphoreGetSciSyncInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SemaphoreGetSciSyncInfoNV & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
semaphore = semaphore_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SemaphoreGetSciSyncInfoNV &
|
|
setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
handleType = handleType_;
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkSemaphoreGetSciSyncInfoNV const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkSemaphoreGetSciSyncInfoNV *>( this );
|
|
}
|
|
|
|
operator VkSemaphoreGetSciSyncInfoNV &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkSemaphoreGetSciSyncInfoNV *>( this );
|
|
}
|
|
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
const void * const &,
|
|
VULKAN_HPP_NAMESPACE::Semaphore const &,
|
|
VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, semaphore, handleType );
|
|
}
|
|
# endif
|
|
|
|
# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( SemaphoreGetSciSyncInfoNV const & ) const = default;
|
|
# else
|
|
bool operator==( SemaphoreGetSciSyncInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) && ( handleType == rhs.handleType );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( SemaphoreGetSciSyncInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
# endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreGetSciSyncInfoNV;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Semaphore semaphore = {};
|
|
VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd;
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eSemaphoreGetSciSyncInfoNV>
|
|
{
|
|
using Type = SemaphoreGetSciSyncInfoNV;
|
|
};
|
|
#endif /*VK_USE_PLATFORM_SCI*/
|
|
|
|
#if defined( VK_USE_PLATFORM_SCI )
|
|
struct SemaphoreSciSyncCreateInfoNV
|
|
{
|
|
using NativeType = VkSemaphoreSciSyncCreateInfoNV;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreSciSyncCreateInfoNV;
|
|
|
|
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR SemaphoreSciSyncCreateInfoNV( VULKAN_HPP_NAMESPACE::SemaphoreSciSyncPoolNV semaphorePool_ = {},
|
|
const NvSciSyncFence * pFence_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, semaphorePool( semaphorePool_ )
|
|
, pFence( pFence_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR SemaphoreSciSyncCreateInfoNV( SemaphoreSciSyncCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SemaphoreSciSyncCreateInfoNV( VkSemaphoreSciSyncCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: SemaphoreSciSyncCreateInfoNV( *reinterpret_cast<SemaphoreSciSyncCreateInfoNV const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
SemaphoreSciSyncCreateInfoNV & operator=( SemaphoreSciSyncCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
SemaphoreSciSyncCreateInfoNV & operator=( VkSemaphoreSciSyncCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SemaphoreSciSyncCreateInfoNV const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 SemaphoreSciSyncCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SemaphoreSciSyncCreateInfoNV & setSemaphorePool( VULKAN_HPP_NAMESPACE::SemaphoreSciSyncPoolNV semaphorePool_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
semaphorePool = semaphorePool_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SemaphoreSciSyncCreateInfoNV & setPFence( const NvSciSyncFence * pFence_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pFence = pFence_;
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkSemaphoreSciSyncCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkSemaphoreSciSyncCreateInfoNV *>( this );
|
|
}
|
|
|
|
operator VkSemaphoreSciSyncCreateInfoNV &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkSemaphoreSciSyncCreateInfoNV *>( this );
|
|
}
|
|
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
const void * const &,
|
|
VULKAN_HPP_NAMESPACE::SemaphoreSciSyncPoolNV const &,
|
|
const NvSciSyncFence * const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, semaphorePool, pFence );
|
|
}
|
|
# endif
|
|
|
|
# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( SemaphoreSciSyncCreateInfoNV const & ) const = default;
|
|
# else
|
|
bool operator==( SemaphoreSciSyncCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphorePool == rhs.semaphorePool ) && ( pFence == rhs.pFence );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( SemaphoreSciSyncCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
# endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreSciSyncCreateInfoNV;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::SemaphoreSciSyncPoolNV semaphorePool = {};
|
|
const NvSciSyncFence * pFence = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eSemaphoreSciSyncCreateInfoNV>
|
|
{
|
|
using Type = SemaphoreSciSyncCreateInfoNV;
|
|
};
|
|
#endif /*VK_USE_PLATFORM_SCI*/
|
|
|
|
#if defined( VK_USE_PLATFORM_SCI )
|
|
struct SemaphoreSciSyncPoolCreateInfoNV
|
|
{
|
|
using NativeType = VkSemaphoreSciSyncPoolCreateInfoNV;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreSciSyncPoolCreateInfoNV;
|
|
|
|
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR SemaphoreSciSyncPoolCreateInfoNV( NvSciSyncObj handle_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, handle( handle_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR SemaphoreSciSyncPoolCreateInfoNV( SemaphoreSciSyncPoolCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SemaphoreSciSyncPoolCreateInfoNV( VkSemaphoreSciSyncPoolCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: SemaphoreSciSyncPoolCreateInfoNV( *reinterpret_cast<SemaphoreSciSyncPoolCreateInfoNV const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
SemaphoreSciSyncPoolCreateInfoNV & operator=( SemaphoreSciSyncPoolCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
SemaphoreSciSyncPoolCreateInfoNV & operator=( VkSemaphoreSciSyncPoolCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SemaphoreSciSyncPoolCreateInfoNV const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 SemaphoreSciSyncPoolCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SemaphoreSciSyncPoolCreateInfoNV & setHandle( NvSciSyncObj handle_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
handle = handle_;
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkSemaphoreSciSyncPoolCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkSemaphoreSciSyncPoolCreateInfoNV *>( this );
|
|
}
|
|
|
|
operator VkSemaphoreSciSyncPoolCreateInfoNV &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkSemaphoreSciSyncPoolCreateInfoNV *>( this );
|
|
}
|
|
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, NvSciSyncObj const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, handle );
|
|
}
|
|
# endif
|
|
|
|
# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
std::strong_ordering operator<=>( SemaphoreSciSyncPoolCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
|
|
return cmp;
|
|
if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
|
|
return cmp;
|
|
if ( auto cmp = memcmp( &handle, &rhs.handle, sizeof( NvSciSyncObj ) ); cmp != 0 )
|
|
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
|
|
|
|
return std::strong_ordering::equivalent;
|
|
}
|
|
# endif
|
|
|
|
bool operator==( SemaphoreSciSyncPoolCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memcmp( &handle, &rhs.handle, sizeof( NvSciSyncObj ) ) == 0 );
|
|
}
|
|
|
|
bool operator!=( SemaphoreSciSyncPoolCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreSciSyncPoolCreateInfoNV;
|
|
const void * pNext = {};
|
|
NvSciSyncObj handle = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eSemaphoreSciSyncPoolCreateInfoNV>
|
|
{
|
|
using Type = SemaphoreSciSyncPoolCreateInfoNV;
|
|
};
|
|
#endif /*VK_USE_PLATFORM_SCI*/
|
|
|
|
struct SemaphoreSignalInfo
|
|
{
|
|
using NativeType = VkSemaphoreSignalInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreSignalInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR
|
|
SemaphoreSignalInfo( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, uint64_t value_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, semaphore( semaphore_ )
|
|
, value( value_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR SemaphoreSignalInfo( SemaphoreSignalInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SemaphoreSignalInfo( VkSemaphoreSignalInfo const & rhs ) VULKAN_HPP_NOEXCEPT : SemaphoreSignalInfo( *reinterpret_cast<SemaphoreSignalInfo const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
SemaphoreSignalInfo & operator=( SemaphoreSignalInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
SemaphoreSignalInfo & operator=( VkSemaphoreSignalInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 SemaphoreSignalInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SemaphoreSignalInfo & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
semaphore = semaphore_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SemaphoreSignalInfo & setValue( uint64_t value_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
value = value_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkSemaphoreSignalInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkSemaphoreSignalInfo *>( this );
|
|
}
|
|
|
|
operator VkSemaphoreSignalInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkSemaphoreSignalInfo *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Semaphore const &, uint64_t const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, semaphore, value );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( SemaphoreSignalInfo const & ) const = default;
|
|
#else
|
|
bool operator==( SemaphoreSignalInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) && ( value == rhs.value );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( SemaphoreSignalInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreSignalInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Semaphore semaphore = {};
|
|
uint64_t value = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eSemaphoreSignalInfo>
|
|
{
|
|
using Type = SemaphoreSignalInfo;
|
|
};
|
|
|
|
using SemaphoreSignalInfoKHR = SemaphoreSignalInfo;
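
  // Usage note (not generated; a minimal sketch, assuming the default "vk" namespace, a valid "device", and a
  // timeline semaphore "timeline"): signals a timeline semaphore to the given value from the host.
  //
  //   vk::SemaphoreSignalInfo signalInfo( timeline, 42 );
  //   device.signalSemaphore( signalInfo );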
|
|
|
|
struct SemaphoreSubmitInfo
|
|
{
|
|
using NativeType = VkSemaphoreSubmitInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreSubmitInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR SemaphoreSubmitInfo( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {},
|
|
uint64_t value_ = {},
|
|
VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask_ = {},
|
|
uint32_t deviceIndex_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, semaphore( semaphore_ )
|
|
, value( value_ )
|
|
, stageMask( stageMask_ )
|
|
, deviceIndex( deviceIndex_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR SemaphoreSubmitInfo( SemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SemaphoreSubmitInfo( VkSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT : SemaphoreSubmitInfo( *reinterpret_cast<SemaphoreSubmitInfo const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
SemaphoreSubmitInfo & operator=( SemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
SemaphoreSubmitInfo & operator=( VkSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 SemaphoreSubmitInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SemaphoreSubmitInfo & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
semaphore = semaphore_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SemaphoreSubmitInfo & setValue( uint64_t value_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
value = value_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SemaphoreSubmitInfo & setStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
stageMask = stageMask_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SemaphoreSubmitInfo & setDeviceIndex( uint32_t deviceIndex_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
deviceIndex = deviceIndex_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkSemaphoreSubmitInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkSemaphoreSubmitInfo *>( this );
|
|
}
|
|
|
|
operator VkSemaphoreSubmitInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkSemaphoreSubmitInfo *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
const void * const &,
|
|
VULKAN_HPP_NAMESPACE::Semaphore const &,
|
|
uint64_t const &,
|
|
VULKAN_HPP_NAMESPACE::PipelineStageFlags2 const &,
|
|
uint32_t const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, semaphore, value, stageMask, deviceIndex );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( SemaphoreSubmitInfo const & ) const = default;
|
|
#else
|
|
bool operator==( SemaphoreSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) && ( value == rhs.value ) && ( stageMask == rhs.stageMask ) &&
|
|
( deviceIndex == rhs.deviceIndex );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( SemaphoreSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
  public:
    VULKAN_HPP_NAMESPACE::StructureType       sType       = StructureType::eSemaphoreSubmitInfo;
    const void *                              pNext       = {};
    VULKAN_HPP_NAMESPACE::Semaphore           semaphore   = {};
    uint64_t                                  value       = {};
    VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask   = {};
    uint32_t                                  deviceIndex = {};
  };

  template <>
  struct CppType<StructureType, StructureType::eSemaphoreSubmitInfo>
  {
    using Type = SemaphoreSubmitInfo;
  };

  using SemaphoreSubmitInfoKHR = SemaphoreSubmitInfo;

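  // Illustrative sketch of filling a SemaphoreSubmitInfo for use with SubmitInfo2 / vkQueueSubmit2,
  // assuming the default `vk` namespace and a hypothetical binary vk::Semaphore `renderFinished`;
  // `value` is only meaningful for timeline semaphores and is ignored for binary ones:
  //
  //   vk::SemaphoreSubmitInfo signalInfo{};
  //   signalInfo.setSemaphore( renderFinished )
  //             .setStageMask( vk::PipelineStageFlagBits2::eColorAttachmentOutput )
  //             .setDeviceIndex( 0 );
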
struct SemaphoreTypeCreateInfo
|
|
{
|
|
using NativeType = VkSemaphoreTypeCreateInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreTypeCreateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR SemaphoreTypeCreateInfo( VULKAN_HPP_NAMESPACE::SemaphoreType semaphoreType_ = VULKAN_HPP_NAMESPACE::SemaphoreType::eBinary,
|
|
uint64_t initialValue_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, semaphoreType( semaphoreType_ )
|
|
, initialValue( initialValue_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR SemaphoreTypeCreateInfo( SemaphoreTypeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SemaphoreTypeCreateInfo( VkSemaphoreTypeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: SemaphoreTypeCreateInfo( *reinterpret_cast<SemaphoreTypeCreateInfo const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
SemaphoreTypeCreateInfo & operator=( SemaphoreTypeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
SemaphoreTypeCreateInfo & operator=( VkSemaphoreTypeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SemaphoreTypeCreateInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 SemaphoreTypeCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SemaphoreTypeCreateInfo & setSemaphoreType( VULKAN_HPP_NAMESPACE::SemaphoreType semaphoreType_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
semaphoreType = semaphoreType_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SemaphoreTypeCreateInfo & setInitialValue( uint64_t initialValue_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
initialValue = initialValue_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkSemaphoreTypeCreateInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkSemaphoreTypeCreateInfo *>( this );
|
|
}
|
|
|
|
operator VkSemaphoreTypeCreateInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkSemaphoreTypeCreateInfo *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::SemaphoreType const &, uint64_t const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, semaphoreType, initialValue );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( SemaphoreTypeCreateInfo const & ) const = default;
|
|
#else
|
|
bool operator==( SemaphoreTypeCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphoreType == rhs.semaphoreType ) && ( initialValue == rhs.initialValue );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( SemaphoreTypeCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
  public:
    VULKAN_HPP_NAMESPACE::StructureType sType         = StructureType::eSemaphoreTypeCreateInfo;
    const void *                        pNext         = {};
    VULKAN_HPP_NAMESPACE::SemaphoreType semaphoreType = VULKAN_HPP_NAMESPACE::SemaphoreType::eBinary;
    uint64_t                            initialValue  = {};
  };

  template <>
  struct CppType<StructureType, StructureType::eSemaphoreTypeCreateInfo>
  {
    using Type = SemaphoreTypeCreateInfo;
  };

  using SemaphoreTypeCreateInfoKHR = SemaphoreTypeCreateInfo;

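  // Illustrative sketch of creating a timeline semaphore by chaining SemaphoreTypeCreateInfo into
  // SemaphoreCreateInfo::pNext, assuming the default `vk` namespace and a hypothetical valid
  // vk::Device `device`:
  //
  //   vk::SemaphoreTypeCreateInfo typeInfo( vk::SemaphoreType::eTimeline, /*initialValue=*/0 );
  //   vk::SemaphoreCreateInfo     createInfo{};
  //   createInfo.setPNext( &typeInfo );
  //   vk::Semaphore timeline = device.createSemaphore( createInfo );
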
struct SemaphoreWaitInfo
|
|
{
|
|
using NativeType = VkSemaphoreWaitInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreWaitInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR SemaphoreWaitInfo( VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags flags_ = {},
|
|
uint32_t semaphoreCount_ = {},
|
|
const VULKAN_HPP_NAMESPACE::Semaphore * pSemaphores_ = {},
|
|
const uint64_t * pValues_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, flags( flags_ )
|
|
, semaphoreCount( semaphoreCount_ )
|
|
, pSemaphores( pSemaphores_ )
|
|
, pValues( pValues_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR SemaphoreWaitInfo( SemaphoreWaitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SemaphoreWaitInfo( VkSemaphoreWaitInfo const & rhs ) VULKAN_HPP_NOEXCEPT : SemaphoreWaitInfo( *reinterpret_cast<SemaphoreWaitInfo const *>( &rhs ) ) {}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
SemaphoreWaitInfo( VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags flags_,
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & semaphores_,
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & values_ = {},
|
|
const void * pNext_ = nullptr )
|
|
: pNext( pNext_ )
|
|
, flags( flags_ )
|
|
, semaphoreCount( static_cast<uint32_t>( semaphores_.size() ) )
|
|
, pSemaphores( semaphores_.data() )
|
|
, pValues( values_.data() )
|
|
{
|
|
# ifdef VULKAN_HPP_NO_EXCEPTIONS
|
|
VULKAN_HPP_ASSERT( semaphores_.size() == values_.size() );
|
|
# else
|
|
if ( semaphores_.size() != values_.size() )
|
|
{
|
|
throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::SemaphoreWaitInfo::SemaphoreWaitInfo: semaphores_.size() != values_.size()" );
|
|
}
|
|
# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
SemaphoreWaitInfo & operator=( SemaphoreWaitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
SemaphoreWaitInfo & operator=( VkSemaphoreWaitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 SemaphoreWaitInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SemaphoreWaitInfo & setFlags( VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SemaphoreWaitInfo & setSemaphoreCount( uint32_t semaphoreCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
semaphoreCount = semaphoreCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SemaphoreWaitInfo & setPSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore * pSemaphores_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pSemaphores = pSemaphores_;
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
SemaphoreWaitInfo &
|
|
setSemaphores( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & semaphores_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
semaphoreCount = static_cast<uint32_t>( semaphores_.size() );
|
|
pSemaphores = semaphores_.data();
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SemaphoreWaitInfo & setPValues( const uint64_t * pValues_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pValues = pValues_;
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
SemaphoreWaitInfo & setValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & values_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
semaphoreCount = static_cast<uint32_t>( values_.size() );
|
|
pValues = values_.data();
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkSemaphoreWaitInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkSemaphoreWaitInfo *>( this );
|
|
}
|
|
|
|
operator VkSemaphoreWaitInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkSemaphoreWaitInfo *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
const void * const &,
|
|
VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags const &,
|
|
uint32_t const &,
|
|
const VULKAN_HPP_NAMESPACE::Semaphore * const &,
|
|
const uint64_t * const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags, semaphoreCount, pSemaphores, pValues );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( SemaphoreWaitInfo const & ) const = default;
|
|
#else
|
|
bool operator==( SemaphoreWaitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( semaphoreCount == rhs.semaphoreCount ) &&
|
|
( pSemaphores == rhs.pSemaphores ) && ( pValues == rhs.pValues );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( SemaphoreWaitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
  public:
    VULKAN_HPP_NAMESPACE::StructureType      sType          = StructureType::eSemaphoreWaitInfo;
    const void *                             pNext          = {};
    VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags flags          = {};
    uint32_t                                 semaphoreCount = {};
    const VULKAN_HPP_NAMESPACE::Semaphore *  pSemaphores    = {};
    const uint64_t *                         pValues        = {};
  };

  template <>
  struct CppType<StructureType, StructureType::eSemaphoreWaitInfo>
  {
    using Type = SemaphoreWaitInfo;
  };

  using SemaphoreWaitInfoKHR = SemaphoreWaitInfo;

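  // Illustrative sketch of waiting on a timeline semaphore value with SemaphoreWaitInfo, assuming the
  // default `vk` namespace and hypothetical handles `device` (vk::Device) and `timeline` (a timeline
  // vk::Semaphore); note that the ArrayProxy constructor above requires the semaphore and value lists
  // to have the same size:
  //
  //   std::array<vk::Semaphore, 1> semaphores = { timeline };
  //   std::array<uint64_t, 1>      values     = { 42 };
  //   vk::SemaphoreWaitInfo waitInfo( {}, semaphores, values );
  //   vk::Result result = device.waitSemaphores( waitInfo, /*timeout in ns=*/UINT64_MAX );
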
struct SharedPresentSurfaceCapabilitiesKHR
|
|
{
|
|
using NativeType = VkSharedPresentSurfaceCapabilitiesKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSharedPresentSurfaceCapabilitiesKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR SharedPresentSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::ImageUsageFlags sharedPresentSupportedUsageFlags_ = {},
|
|
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, sharedPresentSupportedUsageFlags( sharedPresentSupportedUsageFlags_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR SharedPresentSurfaceCapabilitiesKHR( SharedPresentSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SharedPresentSurfaceCapabilitiesKHR( VkSharedPresentSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: SharedPresentSurfaceCapabilitiesKHR( *reinterpret_cast<SharedPresentSurfaceCapabilitiesKHR const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
SharedPresentSurfaceCapabilitiesKHR & operator=( SharedPresentSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
SharedPresentSurfaceCapabilitiesKHR & operator=( VkSharedPresentSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SharedPresentSurfaceCapabilitiesKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
operator VkSharedPresentSurfaceCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkSharedPresentSurfaceCapabilitiesKHR *>( this );
|
|
}
|
|
|
|
operator VkSharedPresentSurfaceCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkSharedPresentSurfaceCapabilitiesKHR *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ImageUsageFlags const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, sharedPresentSupportedUsageFlags );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( SharedPresentSurfaceCapabilitiesKHR const & ) const = default;
|
|
#else
|
|
bool operator==( SharedPresentSurfaceCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( sharedPresentSupportedUsageFlags == rhs.sharedPresentSupportedUsageFlags );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( SharedPresentSurfaceCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
  public:
    VULKAN_HPP_NAMESPACE::StructureType   sType                            = StructureType::eSharedPresentSurfaceCapabilitiesKHR;
    void *                                pNext                            = {};
    VULKAN_HPP_NAMESPACE::ImageUsageFlags sharedPresentSupportedUsageFlags = {};
  };

  template <>
  struct CppType<StructureType, StructureType::eSharedPresentSurfaceCapabilitiesKHR>
  {
    using Type = SharedPresentSurfaceCapabilitiesKHR;
  };

struct SubmitInfo
|
|
{
|
|
using NativeType = VkSubmitInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubmitInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR SubmitInfo( uint32_t waitSemaphoreCount_ = {},
|
|
const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores_ = {},
|
|
const VULKAN_HPP_NAMESPACE::PipelineStageFlags * pWaitDstStageMask_ = {},
|
|
uint32_t commandBufferCount_ = {},
|
|
const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers_ = {},
|
|
uint32_t signalSemaphoreCount_ = {},
|
|
const VULKAN_HPP_NAMESPACE::Semaphore * pSignalSemaphores_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, waitSemaphoreCount( waitSemaphoreCount_ )
|
|
, pWaitSemaphores( pWaitSemaphores_ )
|
|
, pWaitDstStageMask( pWaitDstStageMask_ )
|
|
, commandBufferCount( commandBufferCount_ )
|
|
, pCommandBuffers( pCommandBuffers_ )
|
|
, signalSemaphoreCount( signalSemaphoreCount_ )
|
|
, pSignalSemaphores( pSignalSemaphores_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR SubmitInfo( SubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SubmitInfo( VkSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT : SubmitInfo( *reinterpret_cast<SubmitInfo const *>( &rhs ) ) {}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
SubmitInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & waitSemaphores_,
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineStageFlags> const & waitDstStageMask_ = {},
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers_ = {},
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & signalSemaphores_ = {},
|
|
const void * pNext_ = nullptr )
|
|
: pNext( pNext_ )
|
|
, waitSemaphoreCount( static_cast<uint32_t>( waitSemaphores_.size() ) )
|
|
, pWaitSemaphores( waitSemaphores_.data() )
|
|
, pWaitDstStageMask( waitDstStageMask_.data() )
|
|
, commandBufferCount( static_cast<uint32_t>( commandBuffers_.size() ) )
|
|
, pCommandBuffers( commandBuffers_.data() )
|
|
, signalSemaphoreCount( static_cast<uint32_t>( signalSemaphores_.size() ) )
|
|
, pSignalSemaphores( signalSemaphores_.data() )
|
|
{
|
|
# ifdef VULKAN_HPP_NO_EXCEPTIONS
|
|
VULKAN_HPP_ASSERT( waitSemaphores_.size() == waitDstStageMask_.size() );
|
|
# else
|
|
if ( waitSemaphores_.size() != waitDstStageMask_.size() )
|
|
{
|
|
throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::SubmitInfo::SubmitInfo: waitSemaphores_.size() != waitDstStageMask_.size()" );
|
|
}
|
|
# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
SubmitInfo & operator=( SubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
SubmitInfo & operator=( VkSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubmitInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 SubmitInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubmitInfo & setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
waitSemaphoreCount = waitSemaphoreCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubmitInfo & setPWaitSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pWaitSemaphores = pWaitSemaphores_;
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
SubmitInfo &
|
|
setWaitSemaphores( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & waitSemaphores_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
waitSemaphoreCount = static_cast<uint32_t>( waitSemaphores_.size() );
|
|
pWaitSemaphores = waitSemaphores_.data();
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubmitInfo & setPWaitDstStageMask( const VULKAN_HPP_NAMESPACE::PipelineStageFlags * pWaitDstStageMask_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pWaitDstStageMask = pWaitDstStageMask_;
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
SubmitInfo & setWaitDstStageMask( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineStageFlags> const & waitDstStageMask_ )
|
|
VULKAN_HPP_NOEXCEPT
|
|
{
|
|
waitSemaphoreCount = static_cast<uint32_t>( waitDstStageMask_.size() );
|
|
pWaitDstStageMask = waitDstStageMask_.data();
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubmitInfo & setCommandBufferCount( uint32_t commandBufferCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
commandBufferCount = commandBufferCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubmitInfo & setPCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pCommandBuffers = pCommandBuffers_;
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
SubmitInfo &
|
|
setCommandBuffers( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
commandBufferCount = static_cast<uint32_t>( commandBuffers_.size() );
|
|
pCommandBuffers = commandBuffers_.data();
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubmitInfo & setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
signalSemaphoreCount = signalSemaphoreCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubmitInfo & setPSignalSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore * pSignalSemaphores_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pSignalSemaphores = pSignalSemaphores_;
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
SubmitInfo &
|
|
setSignalSemaphores( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & signalSemaphores_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
signalSemaphoreCount = static_cast<uint32_t>( signalSemaphores_.size() );
|
|
pSignalSemaphores = signalSemaphores_.data();
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkSubmitInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkSubmitInfo *>( this );
|
|
}
|
|
|
|
operator VkSubmitInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkSubmitInfo *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
const void * const &,
|
|
uint32_t const &,
|
|
const VULKAN_HPP_NAMESPACE::Semaphore * const &,
|
|
const VULKAN_HPP_NAMESPACE::PipelineStageFlags * const &,
|
|
uint32_t const &,
|
|
const VULKAN_HPP_NAMESPACE::CommandBuffer * const &,
|
|
uint32_t const &,
|
|
const VULKAN_HPP_NAMESPACE::Semaphore * const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie(
|
|
sType, pNext, waitSemaphoreCount, pWaitSemaphores, pWaitDstStageMask, commandBufferCount, pCommandBuffers, signalSemaphoreCount, pSignalSemaphores );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( SubmitInfo const & ) const = default;
|
|
#else
|
|
bool operator==( SubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( waitSemaphoreCount == rhs.waitSemaphoreCount ) &&
|
|
( pWaitSemaphores == rhs.pWaitSemaphores ) && ( pWaitDstStageMask == rhs.pWaitDstStageMask ) && ( commandBufferCount == rhs.commandBufferCount ) &&
|
|
( pCommandBuffers == rhs.pCommandBuffers ) && ( signalSemaphoreCount == rhs.signalSemaphoreCount ) &&
|
|
( pSignalSemaphores == rhs.pSignalSemaphores );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( SubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
  public:
    VULKAN_HPP_NAMESPACE::StructureType              sType                = StructureType::eSubmitInfo;
    const void *                                     pNext                = {};
    uint32_t                                         waitSemaphoreCount   = {};
    const VULKAN_HPP_NAMESPACE::Semaphore *          pWaitSemaphores      = {};
    const VULKAN_HPP_NAMESPACE::PipelineStageFlags * pWaitDstStageMask    = {};
    uint32_t                                         commandBufferCount   = {};
    const VULKAN_HPP_NAMESPACE::CommandBuffer *      pCommandBuffers      = {};
    uint32_t                                         signalSemaphoreCount = {};
    const VULKAN_HPP_NAMESPACE::Semaphore *          pSignalSemaphores    = {};
  };

  template <>
  struct CppType<StructureType, StructureType::eSubmitInfo>
  {
    using Type = SubmitInfo;
  };

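  // Illustrative sketch of a classic queue submission with SubmitInfo, assuming the default `vk`
  // namespace and hypothetical handles `queue` (vk::Queue), `cmd` (a recorded vk::CommandBuffer),
  // `imageAvailable` / `renderFinished` (binary vk::Semaphore) and `inFlight` (vk::Fence); the
  // enhanced-mode constructor used here requires one wait-stage entry per wait semaphore:
  //
  //   std::array<vk::Semaphore, 1>          waitSemaphores   = { imageAvailable };
  //   std::array<vk::PipelineStageFlags, 1> waitStages       = { vk::PipelineStageFlagBits::eColorAttachmentOutput };
  //   std::array<vk::CommandBuffer, 1>      commandBuffers   = { cmd };
  //   std::array<vk::Semaphore, 1>          signalSemaphores = { renderFinished };
  //   vk::SubmitInfo submitInfo( waitSemaphores, waitStages, commandBuffers, signalSemaphores );
  //   queue.submit( submitInfo, inFlight );
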
struct SubmitInfo2
|
|
{
|
|
using NativeType = VkSubmitInfo2;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubmitInfo2;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR SubmitInfo2( VULKAN_HPP_NAMESPACE::SubmitFlags flags_ = {},
|
|
uint32_t waitSemaphoreInfoCount_ = {},
|
|
const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo * pWaitSemaphoreInfos_ = {},
|
|
uint32_t commandBufferInfoCount_ = {},
|
|
const VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo * pCommandBufferInfos_ = {},
|
|
uint32_t signalSemaphoreInfoCount_ = {},
|
|
const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo * pSignalSemaphoreInfos_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, flags( flags_ )
|
|
, waitSemaphoreInfoCount( waitSemaphoreInfoCount_ )
|
|
, pWaitSemaphoreInfos( pWaitSemaphoreInfos_ )
|
|
, commandBufferInfoCount( commandBufferInfoCount_ )
|
|
, pCommandBufferInfos( pCommandBufferInfos_ )
|
|
, signalSemaphoreInfoCount( signalSemaphoreInfoCount_ )
|
|
, pSignalSemaphoreInfos( pSignalSemaphoreInfos_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR SubmitInfo2( SubmitInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SubmitInfo2( VkSubmitInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT : SubmitInfo2( *reinterpret_cast<SubmitInfo2 const *>( &rhs ) ) {}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
SubmitInfo2( VULKAN_HPP_NAMESPACE::SubmitFlags flags_,
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo> const & waitSemaphoreInfos_,
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo> const & commandBufferInfos_ = {},
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo> const & signalSemaphoreInfos_ = {},
|
|
const void * pNext_ = nullptr )
|
|
: pNext( pNext_ )
|
|
, flags( flags_ )
|
|
, waitSemaphoreInfoCount( static_cast<uint32_t>( waitSemaphoreInfos_.size() ) )
|
|
, pWaitSemaphoreInfos( waitSemaphoreInfos_.data() )
|
|
, commandBufferInfoCount( static_cast<uint32_t>( commandBufferInfos_.size() ) )
|
|
, pCommandBufferInfos( commandBufferInfos_.data() )
|
|
, signalSemaphoreInfoCount( static_cast<uint32_t>( signalSemaphoreInfos_.size() ) )
|
|
, pSignalSemaphoreInfos( signalSemaphoreInfos_.data() )
|
|
{
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
SubmitInfo2 & operator=( SubmitInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
SubmitInfo2 & operator=( VkSubmitInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubmitInfo2 const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 & setFlags( VULKAN_HPP_NAMESPACE::SubmitFlags flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 & setWaitSemaphoreInfoCount( uint32_t waitSemaphoreInfoCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
waitSemaphoreInfoCount = waitSemaphoreInfoCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 & setPWaitSemaphoreInfos( const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo * pWaitSemaphoreInfos_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pWaitSemaphoreInfos = pWaitSemaphoreInfos_;
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
SubmitInfo2 & setWaitSemaphoreInfos(
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo> const & waitSemaphoreInfos_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
waitSemaphoreInfoCount = static_cast<uint32_t>( waitSemaphoreInfos_.size() );
|
|
pWaitSemaphoreInfos = waitSemaphoreInfos_.data();
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 & setCommandBufferInfoCount( uint32_t commandBufferInfoCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
commandBufferInfoCount = commandBufferInfoCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 &
|
|
setPCommandBufferInfos( const VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo * pCommandBufferInfos_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pCommandBufferInfos = pCommandBufferInfos_;
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
SubmitInfo2 & setCommandBufferInfos(
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo> const & commandBufferInfos_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
commandBufferInfoCount = static_cast<uint32_t>( commandBufferInfos_.size() );
|
|
pCommandBufferInfos = commandBufferInfos_.data();
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 & setSignalSemaphoreInfoCount( uint32_t signalSemaphoreInfoCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
signalSemaphoreInfoCount = signalSemaphoreInfoCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 &
|
|
setPSignalSemaphoreInfos( const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo * pSignalSemaphoreInfos_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pSignalSemaphoreInfos = pSignalSemaphoreInfos_;
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
SubmitInfo2 & setSignalSemaphoreInfos(
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo> const & signalSemaphoreInfos_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
signalSemaphoreInfoCount = static_cast<uint32_t>( signalSemaphoreInfos_.size() );
|
|
pSignalSemaphoreInfos = signalSemaphoreInfos_.data();
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkSubmitInfo2 const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkSubmitInfo2 *>( this );
|
|
}
|
|
|
|
operator VkSubmitInfo2 &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkSubmitInfo2 *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
const void * const &,
|
|
VULKAN_HPP_NAMESPACE::SubmitFlags const &,
|
|
uint32_t const &,
|
|
const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo * const &,
|
|
uint32_t const &,
|
|
const VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo * const &,
|
|
uint32_t const &,
|
|
const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo * const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType,
|
|
pNext,
|
|
flags,
|
|
waitSemaphoreInfoCount,
|
|
pWaitSemaphoreInfos,
|
|
commandBufferInfoCount,
|
|
pCommandBufferInfos,
|
|
signalSemaphoreInfoCount,
|
|
pSignalSemaphoreInfos );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( SubmitInfo2 const & ) const = default;
|
|
#else
|
|
bool operator==( SubmitInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( waitSemaphoreInfoCount == rhs.waitSemaphoreInfoCount ) &&
|
|
( pWaitSemaphoreInfos == rhs.pWaitSemaphoreInfos ) && ( commandBufferInfoCount == rhs.commandBufferInfoCount ) &&
|
|
( pCommandBufferInfos == rhs.pCommandBufferInfos ) && ( signalSemaphoreInfoCount == rhs.signalSemaphoreInfoCount ) &&
|
|
( pSignalSemaphoreInfos == rhs.pSignalSemaphoreInfos );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( SubmitInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
  public:
    VULKAN_HPP_NAMESPACE::StructureType                   sType                    = StructureType::eSubmitInfo2;
    const void *                                          pNext                    = {};
    VULKAN_HPP_NAMESPACE::SubmitFlags                     flags                    = {};
    uint32_t                                              waitSemaphoreInfoCount   = {};
    const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo *     pWaitSemaphoreInfos      = {};
    uint32_t                                              commandBufferInfoCount   = {};
    const VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo * pCommandBufferInfos      = {};
    uint32_t                                              signalSemaphoreInfoCount = {};
    const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo *     pSignalSemaphoreInfos    = {};
  };

  template <>
  struct CppType<StructureType, StructureType::eSubmitInfo2>
  {
    using Type = SubmitInfo2;
  };

  using SubmitInfo2KHR = SubmitInfo2;

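  // Illustrative sketch of the synchronization2 submission path with SubmitInfo2 / vkQueueSubmit2,
  // assuming the default `vk` namespace, a Vulkan 1.3 (or VK_KHR_synchronization2) device, and
  // hypothetical handles `queue`, `cmd`, `imageAvailable`, `renderFinished` and `inFlight`:
  //
  //   vk::SemaphoreSubmitInfo     waitInfo( imageAvailable, 0, vk::PipelineStageFlagBits2::eColorAttachmentOutput );
  //   vk::SemaphoreSubmitInfo     signalInfo( renderFinished, 0, vk::PipelineStageFlagBits2::eAllCommands );
  //   vk::CommandBufferSubmitInfo cmdInfo( cmd );
  //   vk::SubmitInfo2             submitInfo( {}, waitInfo, cmdInfo, signalInfo );
  //   queue.submit2( submitInfo, inFlight );
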
struct SubpassBeginInfo
|
|
{
|
|
using NativeType = VkSubpassBeginInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassBeginInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR SubpassBeginInfo( VULKAN_HPP_NAMESPACE::SubpassContents contents_ = VULKAN_HPP_NAMESPACE::SubpassContents::eInline,
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, contents( contents_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR SubpassBeginInfo( SubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SubpassBeginInfo( VkSubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT : SubpassBeginInfo( *reinterpret_cast<SubpassBeginInfo const *>( &rhs ) ) {}
|
|
|
|
SubpassBeginInfo & operator=( SubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
SubpassBeginInfo & operator=( VkSubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassBeginInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 SubpassBeginInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubpassBeginInfo & setContents( VULKAN_HPP_NAMESPACE::SubpassContents contents_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
contents = contents_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkSubpassBeginInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkSubpassBeginInfo *>( this );
|
|
}
|
|
|
|
operator VkSubpassBeginInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkSubpassBeginInfo *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::SubpassContents const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, contents );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( SubpassBeginInfo const & ) const = default;
|
|
#else
|
|
bool operator==( SubpassBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( contents == rhs.contents );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( SubpassBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
  public:
    VULKAN_HPP_NAMESPACE::StructureType   sType    = StructureType::eSubpassBeginInfo;
    const void *                          pNext    = {};
    VULKAN_HPP_NAMESPACE::SubpassContents contents = VULKAN_HPP_NAMESPACE::SubpassContents::eInline;
  };

  template <>
  struct CppType<StructureType, StructureType::eSubpassBeginInfo>
  {
    using Type = SubpassBeginInfo;
  };

  using SubpassBeginInfoKHR = SubpassBeginInfo;

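  // Illustrative sketch of beginning a render pass through the *2 entry points with SubpassBeginInfo,
  // assuming the default `vk` namespace, Vulkan 1.2 (or VK_KHR_create_renderpass2), a hypothetical
  // recording vk::CommandBuffer `cmd` and an already filled vk::RenderPassBeginInfo `rpBegin`:
  //
  //   vk::SubpassBeginInfo subpassBegin( vk::SubpassContents::eInline );
  //   cmd.beginRenderPass2( rpBegin, subpassBegin );  // wraps vkCmdBeginRenderPass2
  //   // ... draw calls ...
  //   cmd.endRenderPass2( vk::SubpassEndInfo{} );     // wraps vkCmdEndRenderPass2
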
struct SubpassDescriptionDepthStencilResolve
|
|
{
|
|
using NativeType = VkSubpassDescriptionDepthStencilResolve;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassDescriptionDepthStencilResolve;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR
|
|
SubpassDescriptionDepthStencilResolve( VULKAN_HPP_NAMESPACE::ResolveModeFlagBits depthResolveMode_ = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone,
|
|
VULKAN_HPP_NAMESPACE::ResolveModeFlagBits stencilResolveMode_ = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone,
|
|
const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilResolveAttachment_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, depthResolveMode( depthResolveMode_ )
|
|
, stencilResolveMode( stencilResolveMode_ )
|
|
, pDepthStencilResolveAttachment( pDepthStencilResolveAttachment_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR SubpassDescriptionDepthStencilResolve( SubpassDescriptionDepthStencilResolve const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SubpassDescriptionDepthStencilResolve( VkSubpassDescriptionDepthStencilResolve const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: SubpassDescriptionDepthStencilResolve( *reinterpret_cast<SubpassDescriptionDepthStencilResolve const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
SubpassDescriptionDepthStencilResolve & operator=( SubpassDescriptionDepthStencilResolve const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
SubpassDescriptionDepthStencilResolve & operator=( VkSubpassDescriptionDepthStencilResolve const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassDescriptionDepthStencilResolve const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 SubpassDescriptionDepthStencilResolve & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubpassDescriptionDepthStencilResolve &
|
|
setDepthResolveMode( VULKAN_HPP_NAMESPACE::ResolveModeFlagBits depthResolveMode_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
depthResolveMode = depthResolveMode_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubpassDescriptionDepthStencilResolve &
|
|
setStencilResolveMode( VULKAN_HPP_NAMESPACE::ResolveModeFlagBits stencilResolveMode_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
stencilResolveMode = stencilResolveMode_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubpassDescriptionDepthStencilResolve &
|
|
setPDepthStencilResolveAttachment( const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilResolveAttachment_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pDepthStencilResolveAttachment = pDepthStencilResolveAttachment_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkSubpassDescriptionDepthStencilResolve const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkSubpassDescriptionDepthStencilResolve *>( this );
|
|
}
|
|
|
|
operator VkSubpassDescriptionDepthStencilResolve &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkSubpassDescriptionDepthStencilResolve *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
const void * const &,
|
|
VULKAN_HPP_NAMESPACE::ResolveModeFlagBits const &,
|
|
VULKAN_HPP_NAMESPACE::ResolveModeFlagBits const &,
|
|
const VULKAN_HPP_NAMESPACE::AttachmentReference2 * const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, depthResolveMode, stencilResolveMode, pDepthStencilResolveAttachment );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( SubpassDescriptionDepthStencilResolve const & ) const = default;
|
|
#else
|
|
bool operator==( SubpassDescriptionDepthStencilResolve const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( depthResolveMode == rhs.depthResolveMode ) &&
|
|
( stencilResolveMode == rhs.stencilResolveMode ) && ( pDepthStencilResolveAttachment == rhs.pDepthStencilResolveAttachment );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( SubpassDescriptionDepthStencilResolve const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
  public:
    VULKAN_HPP_NAMESPACE::StructureType                 sType                          = StructureType::eSubpassDescriptionDepthStencilResolve;
    const void *                                        pNext                          = {};
    VULKAN_HPP_NAMESPACE::ResolveModeFlagBits           depthResolveMode               = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone;
    VULKAN_HPP_NAMESPACE::ResolveModeFlagBits           stencilResolveMode             = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone;
    const VULKAN_HPP_NAMESPACE::AttachmentReference2 *  pDepthStencilResolveAttachment = {};
  };

  template <>
  struct CppType<StructureType, StructureType::eSubpassDescriptionDepthStencilResolve>
  {
    using Type = SubpassDescriptionDepthStencilResolve;
  };

  using SubpassDescriptionDepthStencilResolveKHR = SubpassDescriptionDepthStencilResolve;

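  // Illustrative sketch of chaining SubpassDescriptionDepthStencilResolve into a SubpassDescription2
  // to request a depth/stencil resolve, assuming the default `vk` namespace and a hypothetical
  // vk::AttachmentReference2 `depthResolveRef` that points at the single-sample resolve attachment:
  //
  //   vk::SubpassDescriptionDepthStencilResolve dsResolve( vk::ResolveModeFlagBits::eSampleZero,
  //                                                        vk::ResolveModeFlagBits::eSampleZero,
  //                                                        &depthResolveRef );
  //   vk::SubpassDescription2 subpass{};
  //   subpass.setPNext( &dsResolve );
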
struct SubpassEndInfo
|
|
{
|
|
using NativeType = VkSubpassEndInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassEndInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR SubpassEndInfo( const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext( pNext_ ) {}
|
|
|
|
VULKAN_HPP_CONSTEXPR SubpassEndInfo( SubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SubpassEndInfo( VkSubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT : SubpassEndInfo( *reinterpret_cast<SubpassEndInfo const *>( &rhs ) ) {}
|
|
|
|
SubpassEndInfo & operator=( SubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
SubpassEndInfo & operator=( VkSubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassEndInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 SubpassEndInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkSubpassEndInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkSubpassEndInfo *>( this );
|
|
}
|
|
|
|
operator VkSubpassEndInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkSubpassEndInfo *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( SubpassEndInfo const & ) const = default;
|
|
#else
|
|
bool operator==( SubpassEndInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( SubpassEndInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassEndInfo;
    const void *                        pNext = {};
  };

  template <>
  struct CppType<StructureType, StructureType::eSubpassEndInfo>
  {
    using Type = SubpassEndInfo;
  };

  using SubpassEndInfoKHR = SubpassEndInfo;

struct SurfaceCapabilities2EXT
|
|
{
|
|
using NativeType = VkSurfaceCapabilities2EXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceCapabilities2EXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR SurfaceCapabilities2EXT(
|
|
uint32_t minImageCount_ = {},
|
|
uint32_t maxImageCount_ = {},
|
|
VULKAN_HPP_NAMESPACE::Extent2D currentExtent_ = {},
|
|
VULKAN_HPP_NAMESPACE::Extent2D minImageExtent_ = {},
|
|
VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent_ = {},
|
|
uint32_t maxImageArrayLayers_ = {},
|
|
VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms_ = {},
|
|
VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity,
|
|
VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha_ = {},
|
|
VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags_ = {},
|
|
VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT supportedSurfaceCounters_ = {},
|
|
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, minImageCount( minImageCount_ )
|
|
, maxImageCount( maxImageCount_ )
|
|
, currentExtent( currentExtent_ )
|
|
, minImageExtent( minImageExtent_ )
|
|
, maxImageExtent( maxImageExtent_ )
|
|
, maxImageArrayLayers( maxImageArrayLayers_ )
|
|
, supportedTransforms( supportedTransforms_ )
|
|
, currentTransform( currentTransform_ )
|
|
, supportedCompositeAlpha( supportedCompositeAlpha_ )
|
|
, supportedUsageFlags( supportedUsageFlags_ )
|
|
, supportedSurfaceCounters( supportedSurfaceCounters_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR SurfaceCapabilities2EXT( SurfaceCapabilities2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SurfaceCapabilities2EXT( VkSurfaceCapabilities2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: SurfaceCapabilities2EXT( *reinterpret_cast<SurfaceCapabilities2EXT const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
SurfaceCapabilities2EXT & operator=( SurfaceCapabilities2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
SurfaceCapabilities2EXT & operator=( VkSurfaceCapabilities2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
operator VkSurfaceCapabilities2EXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkSurfaceCapabilities2EXT *>( this );
|
|
}
|
|
|
|
operator VkSurfaceCapabilities2EXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkSurfaceCapabilities2EXT *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
void * const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
VULKAN_HPP_NAMESPACE::Extent2D const &,
|
|
VULKAN_HPP_NAMESPACE::Extent2D const &,
|
|
VULKAN_HPP_NAMESPACE::Extent2D const &,
|
|
uint32_t const &,
|
|
VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR const &,
|
|
VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR const &,
|
|
VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR const &,
|
|
VULKAN_HPP_NAMESPACE::ImageUsageFlags const &,
|
|
VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType,
|
|
pNext,
|
|
minImageCount,
|
|
maxImageCount,
|
|
currentExtent,
|
|
minImageExtent,
|
|
maxImageExtent,
|
|
maxImageArrayLayers,
|
|
supportedTransforms,
|
|
currentTransform,
|
|
supportedCompositeAlpha,
|
|
supportedUsageFlags,
|
|
supportedSurfaceCounters );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( SurfaceCapabilities2EXT const & ) const = default;
|
|
#else
|
|
bool operator==( SurfaceCapabilities2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minImageCount == rhs.minImageCount ) && ( maxImageCount == rhs.maxImageCount ) &&
|
|
( currentExtent == rhs.currentExtent ) && ( minImageExtent == rhs.minImageExtent ) && ( maxImageExtent == rhs.maxImageExtent ) &&
|
|
( maxImageArrayLayers == rhs.maxImageArrayLayers ) && ( supportedTransforms == rhs.supportedTransforms ) &&
|
|
( currentTransform == rhs.currentTransform ) && ( supportedCompositeAlpha == rhs.supportedCompositeAlpha ) &&
|
|
( supportedUsageFlags == rhs.supportedUsageFlags ) && ( supportedSurfaceCounters == rhs.supportedSurfaceCounters );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( SurfaceCapabilities2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
  public:
    VULKAN_HPP_NAMESPACE::StructureType               sType                    = StructureType::eSurfaceCapabilities2EXT;
    void *                                            pNext                    = {};
    uint32_t                                          minImageCount            = {};
    uint32_t                                          maxImageCount            = {};
    VULKAN_HPP_NAMESPACE::Extent2D                    currentExtent            = {};
    VULKAN_HPP_NAMESPACE::Extent2D                    minImageExtent           = {};
    VULKAN_HPP_NAMESPACE::Extent2D                    maxImageExtent           = {};
    uint32_t                                          maxImageArrayLayers      = {};
    VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR    supportedTransforms      = {};
    VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform         = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity;
    VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR      supportedCompositeAlpha  = {};
    VULKAN_HPP_NAMESPACE::ImageUsageFlags             supportedUsageFlags      = {};
    VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT      supportedSurfaceCounters = {};
  };

  template <>
  struct CppType<StructureType, StructureType::eSurfaceCapabilities2EXT>
  {
    using Type = SurfaceCapabilities2EXT;
  };

struct SurfaceCapabilitiesKHR
|
|
{
|
|
using NativeType = VkSurfaceCapabilitiesKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesKHR(
|
|
uint32_t minImageCount_ = {},
|
|
uint32_t maxImageCount_ = {},
|
|
VULKAN_HPP_NAMESPACE::Extent2D currentExtent_ = {},
|
|
VULKAN_HPP_NAMESPACE::Extent2D minImageExtent_ = {},
|
|
VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent_ = {},
|
|
uint32_t maxImageArrayLayers_ = {},
|
|
VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms_ = {},
|
|
VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity,
|
|
VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha_ = {},
|
|
VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags_ = {} ) VULKAN_HPP_NOEXCEPT
|
|
: minImageCount( minImageCount_ )
|
|
, maxImageCount( maxImageCount_ )
|
|
, currentExtent( currentExtent_ )
|
|
, minImageExtent( minImageExtent_ )
|
|
, maxImageExtent( maxImageExtent_ )
|
|
, maxImageArrayLayers( maxImageArrayLayers_ )
|
|
, supportedTransforms( supportedTransforms_ )
|
|
, currentTransform( currentTransform_ )
|
|
, supportedCompositeAlpha( supportedCompositeAlpha_ )
|
|
, supportedUsageFlags( supportedUsageFlags_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesKHR( SurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SurfaceCapabilitiesKHR( VkSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: SurfaceCapabilitiesKHR( *reinterpret_cast<SurfaceCapabilitiesKHR const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
SurfaceCapabilitiesKHR & operator=( SurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
SurfaceCapabilitiesKHR & operator=( VkSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
operator VkSurfaceCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkSurfaceCapabilitiesKHR *>( this );
|
|
}
|
|
|
|
operator VkSurfaceCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkSurfaceCapabilitiesKHR *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<uint32_t const &,
|
|
uint32_t const &,
|
|
VULKAN_HPP_NAMESPACE::Extent2D const &,
|
|
VULKAN_HPP_NAMESPACE::Extent2D const &,
|
|
VULKAN_HPP_NAMESPACE::Extent2D const &,
|
|
uint32_t const &,
|
|
VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR const &,
|
|
VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR const &,
|
|
VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR const &,
|
|
VULKAN_HPP_NAMESPACE::ImageUsageFlags const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( minImageCount,
|
|
maxImageCount,
|
|
currentExtent,
|
|
minImageExtent,
|
|
maxImageExtent,
|
|
maxImageArrayLayers,
|
|
supportedTransforms,
|
|
currentTransform,
|
|
supportedCompositeAlpha,
|
|
supportedUsageFlags );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( SurfaceCapabilitiesKHR const & ) const = default;
|
|
#else
|
|
bool operator==( SurfaceCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( minImageCount == rhs.minImageCount ) && ( maxImageCount == rhs.maxImageCount ) && ( currentExtent == rhs.currentExtent ) &&
|
|
( minImageExtent == rhs.minImageExtent ) && ( maxImageExtent == rhs.maxImageExtent ) && ( maxImageArrayLayers == rhs.maxImageArrayLayers ) &&
|
|
( supportedTransforms == rhs.supportedTransforms ) && ( currentTransform == rhs.currentTransform ) &&
|
|
( supportedCompositeAlpha == rhs.supportedCompositeAlpha ) && ( supportedUsageFlags == rhs.supportedUsageFlags );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( SurfaceCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
  public:
    uint32_t                                          minImageCount           = {};
    uint32_t                                          maxImageCount           = {};
    VULKAN_HPP_NAMESPACE::Extent2D                    currentExtent           = {};
    VULKAN_HPP_NAMESPACE::Extent2D                    minImageExtent          = {};
    VULKAN_HPP_NAMESPACE::Extent2D                    maxImageExtent          = {};
    uint32_t                                          maxImageArrayLayers     = {};
    VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR    supportedTransforms     = {};
    VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform        = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity;
    VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR      supportedCompositeAlpha = {};
    VULKAN_HPP_NAMESPACE::ImageUsageFlags             supportedUsageFlags     = {};
  };

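  // Illustrative sketch of using SurfaceCapabilitiesKHR to pick swapchain parameters, assuming the
  // default `vk` namespace and hypothetical handles `physicalDevice` (vk::PhysicalDevice) and
  // `surface` (vk::SurfaceKHR); a maxImageCount of 0 means there is no upper limit:
  //
  //   vk::SurfaceCapabilitiesKHR caps = physicalDevice.getSurfaceCapabilitiesKHR( surface );
  //   uint32_t imageCount = caps.minImageCount + 1;
  //   if ( ( caps.maxImageCount > 0 ) && ( imageCount > caps.maxImageCount ) )
  //   {
  //     imageCount = caps.maxImageCount;
  //   }
  //   vk::Extent2D extent = caps.currentExtent;  // (0xFFFFFFFF, 0xFFFFFFFF) means the application chooses the extent
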
struct SurfaceCapabilities2KHR
|
|
{
|
|
using NativeType = VkSurfaceCapabilities2KHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceCapabilities2KHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR SurfaceCapabilities2KHR( VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR surfaceCapabilities_ = {},
|
|
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, surfaceCapabilities( surfaceCapabilities_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR SurfaceCapabilities2KHR( SurfaceCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SurfaceCapabilities2KHR( VkSurfaceCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: SurfaceCapabilities2KHR( *reinterpret_cast<SurfaceCapabilities2KHR const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
SurfaceCapabilities2KHR & operator=( SurfaceCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
SurfaceCapabilities2KHR & operator=( VkSurfaceCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
operator VkSurfaceCapabilities2KHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkSurfaceCapabilities2KHR *>( this );
|
|
}
|
|
|
|
operator VkSurfaceCapabilities2KHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkSurfaceCapabilities2KHR *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, surfaceCapabilities );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( SurfaceCapabilities2KHR const & ) const = default;
|
|
#else
|
|
bool operator==( SurfaceCapabilities2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( surfaceCapabilities == rhs.surfaceCapabilities );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( SurfaceCapabilities2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
  public:
    VULKAN_HPP_NAMESPACE::StructureType          sType               = StructureType::eSurfaceCapabilities2KHR;
    void *                                       pNext               = {};
    VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR surfaceCapabilities = {};
  };

  template <>
  struct CppType<StructureType, StructureType::eSurfaceCapabilities2KHR>
  {
    using Type = SurfaceCapabilities2KHR;
  };

struct SurfaceFormatKHR
|
|
{
|
|
using NativeType = VkSurfaceFormatKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR
|
|
SurfaceFormatKHR( VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
|
|
VULKAN_HPP_NAMESPACE::ColorSpaceKHR colorSpace_ = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear ) VULKAN_HPP_NOEXCEPT
|
|
: format( format_ )
|
|
, colorSpace( colorSpace_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR SurfaceFormatKHR( SurfaceFormatKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SurfaceFormatKHR( VkSurfaceFormatKHR const & rhs ) VULKAN_HPP_NOEXCEPT : SurfaceFormatKHR( *reinterpret_cast<SurfaceFormatKHR const *>( &rhs ) ) {}
|
|
|
|
SurfaceFormatKHR & operator=( SurfaceFormatKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
SurfaceFormatKHR & operator=( VkSurfaceFormatKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
operator VkSurfaceFormatKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkSurfaceFormatKHR *>( this );
|
|
}
|
|
|
|
operator VkSurfaceFormatKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkSurfaceFormatKHR *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::Format const &, VULKAN_HPP_NAMESPACE::ColorSpaceKHR const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( format, colorSpace );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( SurfaceFormatKHR const & ) const = default;
|
|
#else
|
|
bool operator==( SurfaceFormatKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( format == rhs.format ) && ( colorSpace == rhs.colorSpace );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( SurfaceFormatKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
|
|
VULKAN_HPP_NAMESPACE::ColorSpaceKHR colorSpace = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear;
|
|
};
|
|
|
|
struct SurfaceFormat2KHR
|
|
{
|
|
using NativeType = VkSurfaceFormat2KHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceFormat2KHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR SurfaceFormat2KHR( VULKAN_HPP_NAMESPACE::SurfaceFormatKHR surfaceFormat_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, surfaceFormat( surfaceFormat_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR SurfaceFormat2KHR( SurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SurfaceFormat2KHR( VkSurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT : SurfaceFormat2KHR( *reinterpret_cast<SurfaceFormat2KHR const *>( &rhs ) ) {}
|
|
|
|
SurfaceFormat2KHR & operator=( SurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
SurfaceFormat2KHR & operator=( VkSurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
operator VkSurfaceFormat2KHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkSurfaceFormat2KHR *>( this );
|
|
}
|
|
|
|
operator VkSurfaceFormat2KHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkSurfaceFormat2KHR *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::SurfaceFormatKHR const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, surfaceFormat );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( SurfaceFormat2KHR const & ) const = default;
|
|
#else
|
|
bool operator==( SurfaceFormat2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( surfaceFormat == rhs.surfaceFormat );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( SurfaceFormat2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceFormat2KHR;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::SurfaceFormatKHR surfaceFormat = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eSurfaceFormat2KHR>
|
|
{
|
|
using Type = SurfaceFormat2KHR;
|
|
};
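
  // Illustrative sketch of how SurfaceFormat2KHR is typically consumed; it assumes the
  // VK_KHR_get_surface_capabilities2 extension is enabled and that "physicalDevice" and
  // "surface" exist in the calling code:
  //
  //   auto formats = physicalDevice.getSurfaceFormats2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR( surface ) );
  //   VULKAN_HPP_NAMESPACE::SurfaceFormatKHR chosen = formats.front().surfaceFormat;
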
struct SwapchainCounterCreateInfoEXT
|
|
{
|
|
using NativeType = VkSwapchainCounterCreateInfoEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSwapchainCounterCreateInfoEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR SwapchainCounterCreateInfoEXT( VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT surfaceCounters_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, surfaceCounters( surfaceCounters_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR SwapchainCounterCreateInfoEXT( SwapchainCounterCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SwapchainCounterCreateInfoEXT( VkSwapchainCounterCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: SwapchainCounterCreateInfoEXT( *reinterpret_cast<SwapchainCounterCreateInfoEXT const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
SwapchainCounterCreateInfoEXT & operator=( SwapchainCounterCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
SwapchainCounterCreateInfoEXT & operator=( VkSwapchainCounterCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SwapchainCounterCreateInfoEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 SwapchainCounterCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SwapchainCounterCreateInfoEXT &
|
|
setSurfaceCounters( VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT surfaceCounters_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
surfaceCounters = surfaceCounters_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkSwapchainCounterCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkSwapchainCounterCreateInfoEXT *>( this );
|
|
}
|
|
|
|
operator VkSwapchainCounterCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkSwapchainCounterCreateInfoEXT *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, surfaceCounters );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( SwapchainCounterCreateInfoEXT const & ) const = default;
|
|
#else
|
|
bool operator==( SwapchainCounterCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( surfaceCounters == rhs.surfaceCounters );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( SwapchainCounterCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainCounterCreateInfoEXT;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT surfaceCounters = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eSwapchainCounterCreateInfoEXT>
|
|
{
|
|
using Type = SwapchainCounterCreateInfoEXT;
|
|
};
|
|
|
|
struct SwapchainCreateInfoKHR
|
|
{
|
|
using NativeType = VkSwapchainCreateInfoKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSwapchainCreateInfoKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR
|
|
SwapchainCreateInfoKHR( VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR flags_ = {},
|
|
VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ = {},
|
|
uint32_t minImageCount_ = {},
|
|
VULKAN_HPP_NAMESPACE::Format imageFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
|
|
VULKAN_HPP_NAMESPACE::ColorSpaceKHR imageColorSpace_ = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear,
|
|
VULKAN_HPP_NAMESPACE::Extent2D imageExtent_ = {},
|
|
uint32_t imageArrayLayers_ = {},
|
|
VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage_ = {},
|
|
VULKAN_HPP_NAMESPACE::SharingMode imageSharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive,
|
|
uint32_t queueFamilyIndexCount_ = {},
|
|
const uint32_t * pQueueFamilyIndices_ = {},
|
|
VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity,
|
|
VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha_ = VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR::eOpaque,
|
|
VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode_ = VULKAN_HPP_NAMESPACE::PresentModeKHR::eImmediate,
|
|
VULKAN_HPP_NAMESPACE::Bool32 clipped_ = {},
|
|
VULKAN_HPP_NAMESPACE::SwapchainKHR oldSwapchain_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, flags( flags_ )
|
|
, surface( surface_ )
|
|
, minImageCount( minImageCount_ )
|
|
, imageFormat( imageFormat_ )
|
|
, imageColorSpace( imageColorSpace_ )
|
|
, imageExtent( imageExtent_ )
|
|
, imageArrayLayers( imageArrayLayers_ )
|
|
, imageUsage( imageUsage_ )
|
|
, imageSharingMode( imageSharingMode_ )
|
|
, queueFamilyIndexCount( queueFamilyIndexCount_ )
|
|
, pQueueFamilyIndices( pQueueFamilyIndices_ )
|
|
, preTransform( preTransform_ )
|
|
, compositeAlpha( compositeAlpha_ )
|
|
, presentMode( presentMode_ )
|
|
, clipped( clipped_ )
|
|
, oldSwapchain( oldSwapchain_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR SwapchainCreateInfoKHR( SwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SwapchainCreateInfoKHR( VkSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: SwapchainCreateInfoKHR( *reinterpret_cast<SwapchainCreateInfoKHR const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
SwapchainCreateInfoKHR( VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR flags_,
|
|
VULKAN_HPP_NAMESPACE::SurfaceKHR surface_,
|
|
uint32_t minImageCount_,
|
|
VULKAN_HPP_NAMESPACE::Format imageFormat_,
|
|
VULKAN_HPP_NAMESPACE::ColorSpaceKHR imageColorSpace_,
|
|
VULKAN_HPP_NAMESPACE::Extent2D imageExtent_,
|
|
uint32_t imageArrayLayers_,
|
|
VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage_,
|
|
VULKAN_HPP_NAMESPACE::SharingMode imageSharingMode_,
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & queueFamilyIndices_,
|
|
VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity,
|
|
VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha_ = VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR::eOpaque,
|
|
VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode_ = VULKAN_HPP_NAMESPACE::PresentModeKHR::eImmediate,
|
|
VULKAN_HPP_NAMESPACE::Bool32 clipped_ = {},
|
|
VULKAN_HPP_NAMESPACE::SwapchainKHR oldSwapchain_ = {},
|
|
const void * pNext_ = nullptr )
|
|
: pNext( pNext_ )
|
|
, flags( flags_ )
|
|
, surface( surface_ )
|
|
, minImageCount( minImageCount_ )
|
|
, imageFormat( imageFormat_ )
|
|
, imageColorSpace( imageColorSpace_ )
|
|
, imageExtent( imageExtent_ )
|
|
, imageArrayLayers( imageArrayLayers_ )
|
|
, imageUsage( imageUsage_ )
|
|
, imageSharingMode( imageSharingMode_ )
|
|
, queueFamilyIndexCount( static_cast<uint32_t>( queueFamilyIndices_.size() ) )
|
|
, pQueueFamilyIndices( queueFamilyIndices_.data() )
|
|
, preTransform( preTransform_ )
|
|
, compositeAlpha( compositeAlpha_ )
|
|
, presentMode( presentMode_ )
|
|
, clipped( clipped_ )
|
|
, oldSwapchain( oldSwapchain_ )
|
|
{
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
SwapchainCreateInfoKHR & operator=( SwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
SwapchainCreateInfoKHR & operator=( VkSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setSurface( VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
surface = surface_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setMinImageCount( uint32_t minImageCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
minImageCount = minImageCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setImageFormat( VULKAN_HPP_NAMESPACE::Format imageFormat_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
imageFormat = imageFormat_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setImageColorSpace( VULKAN_HPP_NAMESPACE::ColorSpaceKHR imageColorSpace_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
imageColorSpace = imageColorSpace_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setImageExtent( VULKAN_HPP_NAMESPACE::Extent2D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
imageExtent = imageExtent_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setImageArrayLayers( uint32_t imageArrayLayers_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
imageArrayLayers = imageArrayLayers_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setImageUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
imageUsage = imageUsage_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setImageSharingMode( VULKAN_HPP_NAMESPACE::SharingMode imageSharingMode_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
imageSharingMode = imageSharingMode_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
queueFamilyIndexCount = queueFamilyIndexCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setPQueueFamilyIndices( const uint32_t * pQueueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pQueueFamilyIndices = pQueueFamilyIndices_;
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
SwapchainCreateInfoKHR &
|
|
setQueueFamilyIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & queueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
queueFamilyIndexCount = static_cast<uint32_t>( queueFamilyIndices_.size() );
|
|
pQueueFamilyIndices = queueFamilyIndices_.data();
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setPreTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
preTransform = preTransform_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setCompositeAlpha( VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
compositeAlpha = compositeAlpha_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setPresentMode( VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
presentMode = presentMode_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setClipped( VULKAN_HPP_NAMESPACE::Bool32 clipped_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
clipped = clipped_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setOldSwapchain( VULKAN_HPP_NAMESPACE::SwapchainKHR oldSwapchain_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
oldSwapchain = oldSwapchain_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkSwapchainCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkSwapchainCreateInfoKHR *>( this );
|
|
}
|
|
|
|
operator VkSwapchainCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkSwapchainCreateInfoKHR *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
const void * const &,
|
|
VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR const &,
|
|
VULKAN_HPP_NAMESPACE::SurfaceKHR const &,
|
|
uint32_t const &,
|
|
VULKAN_HPP_NAMESPACE::Format const &,
|
|
VULKAN_HPP_NAMESPACE::ColorSpaceKHR const &,
|
|
VULKAN_HPP_NAMESPACE::Extent2D const &,
|
|
uint32_t const &,
|
|
VULKAN_HPP_NAMESPACE::ImageUsageFlags const &,
|
|
VULKAN_HPP_NAMESPACE::SharingMode const &,
|
|
uint32_t const &,
|
|
const uint32_t * const &,
|
|
VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR const &,
|
|
VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR const &,
|
|
VULKAN_HPP_NAMESPACE::PresentModeKHR const &,
|
|
VULKAN_HPP_NAMESPACE::Bool32 const &,
|
|
VULKAN_HPP_NAMESPACE::SwapchainKHR const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType,
|
|
pNext,
|
|
flags,
|
|
surface,
|
|
minImageCount,
|
|
imageFormat,
|
|
imageColorSpace,
|
|
imageExtent,
|
|
imageArrayLayers,
|
|
imageUsage,
|
|
imageSharingMode,
|
|
queueFamilyIndexCount,
|
|
pQueueFamilyIndices,
|
|
preTransform,
|
|
compositeAlpha,
|
|
presentMode,
|
|
clipped,
|
|
oldSwapchain );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( SwapchainCreateInfoKHR const & ) const = default;
|
|
#else
|
|
bool operator==( SwapchainCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( surface == rhs.surface ) &&
|
|
( minImageCount == rhs.minImageCount ) && ( imageFormat == rhs.imageFormat ) && ( imageColorSpace == rhs.imageColorSpace ) &&
|
|
( imageExtent == rhs.imageExtent ) && ( imageArrayLayers == rhs.imageArrayLayers ) && ( imageUsage == rhs.imageUsage ) &&
|
|
( imageSharingMode == rhs.imageSharingMode ) && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount ) &&
|
|
( pQueueFamilyIndices == rhs.pQueueFamilyIndices ) && ( preTransform == rhs.preTransform ) && ( compositeAlpha == rhs.compositeAlpha ) &&
|
|
( presentMode == rhs.presentMode ) && ( clipped == rhs.clipped ) && ( oldSwapchain == rhs.oldSwapchain );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( SwapchainCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainCreateInfoKHR;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR flags = {};
|
|
VULKAN_HPP_NAMESPACE::SurfaceKHR surface = {};
|
|
uint32_t minImageCount = {};
|
|
VULKAN_HPP_NAMESPACE::Format imageFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
|
|
VULKAN_HPP_NAMESPACE::ColorSpaceKHR imageColorSpace = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear;
|
|
VULKAN_HPP_NAMESPACE::Extent2D imageExtent = {};
|
|
uint32_t imageArrayLayers = {};
|
|
VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage = {};
|
|
VULKAN_HPP_NAMESPACE::SharingMode imageSharingMode = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive;
|
|
uint32_t queueFamilyIndexCount = {};
|
|
const uint32_t * pQueueFamilyIndices = {};
|
|
VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity;
|
|
VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha = VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR::eOpaque;
|
|
VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode = VULKAN_HPP_NAMESPACE::PresentModeKHR::eImmediate;
|
|
VULKAN_HPP_NAMESPACE::Bool32 clipped = {};
|
|
VULKAN_HPP_NAMESPACE::SwapchainKHR oldSwapchain = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eSwapchainCreateInfoKHR>
|
|
{
|
|
using Type = SwapchainCreateInfoKHR;
|
|
};
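
  // A minimal sketch of filling SwapchainCreateInfoKHR through its fluent setters (available when
  // VULKAN_HPP_NO_STRUCT_SETTERS is not defined); "surface" and "extent" are assumed to come from
  // the surrounding application code, and the format / present mode below are placeholder choices:
  //
  //   VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR createInfo{};
  //   createInfo.setSurface( surface )
  //     .setMinImageCount( 3 )
  //     .setImageFormat( VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Srgb )
  //     .setImageColorSpace( VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear )
  //     .setImageExtent( extent )
  //     .setImageArrayLayers( 1 )
  //     .setImageUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlagBits::eColorAttachment )
  //     .setPresentMode( VULKAN_HPP_NAMESPACE::PresentModeKHR::eFifo )
  //     .setClipped( VK_TRUE );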
|
|
|
|
struct TimelineSemaphoreSubmitInfo
|
|
{
|
|
using NativeType = VkTimelineSemaphoreSubmitInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eTimelineSemaphoreSubmitInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR TimelineSemaphoreSubmitInfo( uint32_t waitSemaphoreValueCount_ = {},
|
|
const uint64_t * pWaitSemaphoreValues_ = {},
|
|
uint32_t signalSemaphoreValueCount_ = {},
|
|
const uint64_t * pSignalSemaphoreValues_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, waitSemaphoreValueCount( waitSemaphoreValueCount_ )
|
|
, pWaitSemaphoreValues( pWaitSemaphoreValues_ )
|
|
, signalSemaphoreValueCount( signalSemaphoreValueCount_ )
|
|
, pSignalSemaphoreValues( pSignalSemaphoreValues_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR TimelineSemaphoreSubmitInfo( TimelineSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
TimelineSemaphoreSubmitInfo( VkTimelineSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: TimelineSemaphoreSubmitInfo( *reinterpret_cast<TimelineSemaphoreSubmitInfo const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
TimelineSemaphoreSubmitInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & waitSemaphoreValues_,
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & signalSemaphoreValues_ = {},
|
|
const void * pNext_ = nullptr )
|
|
: pNext( pNext_ )
|
|
, waitSemaphoreValueCount( static_cast<uint32_t>( waitSemaphoreValues_.size() ) )
|
|
, pWaitSemaphoreValues( waitSemaphoreValues_.data() )
|
|
, signalSemaphoreValueCount( static_cast<uint32_t>( signalSemaphoreValues_.size() ) )
|
|
, pSignalSemaphoreValues( signalSemaphoreValues_.data() )
|
|
{
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
TimelineSemaphoreSubmitInfo & operator=( TimelineSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
TimelineSemaphoreSubmitInfo & operator=( VkTimelineSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::TimelineSemaphoreSubmitInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 TimelineSemaphoreSubmitInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 TimelineSemaphoreSubmitInfo & setWaitSemaphoreValueCount( uint32_t waitSemaphoreValueCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
waitSemaphoreValueCount = waitSemaphoreValueCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 TimelineSemaphoreSubmitInfo & setPWaitSemaphoreValues( const uint64_t * pWaitSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pWaitSemaphoreValues = pWaitSemaphoreValues_;
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
TimelineSemaphoreSubmitInfo &
|
|
setWaitSemaphoreValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & waitSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
waitSemaphoreValueCount = static_cast<uint32_t>( waitSemaphoreValues_.size() );
|
|
pWaitSemaphoreValues = waitSemaphoreValues_.data();
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 TimelineSemaphoreSubmitInfo & setSignalSemaphoreValueCount( uint32_t signalSemaphoreValueCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
signalSemaphoreValueCount = signalSemaphoreValueCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 TimelineSemaphoreSubmitInfo & setPSignalSemaphoreValues( const uint64_t * pSignalSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pSignalSemaphoreValues = pSignalSemaphoreValues_;
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
TimelineSemaphoreSubmitInfo &
|
|
setSignalSemaphoreValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & signalSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
signalSemaphoreValueCount = static_cast<uint32_t>( signalSemaphoreValues_.size() );
|
|
pSignalSemaphoreValues = signalSemaphoreValues_.data();
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkTimelineSemaphoreSubmitInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkTimelineSemaphoreSubmitInfo *>( this );
|
|
}
|
|
|
|
operator VkTimelineSemaphoreSubmitInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkTimelineSemaphoreSubmitInfo *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
const void * const &,
|
|
uint32_t const &,
|
|
const uint64_t * const &,
|
|
uint32_t const &,
|
|
const uint64_t * const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, waitSemaphoreValueCount, pWaitSemaphoreValues, signalSemaphoreValueCount, pSignalSemaphoreValues );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( TimelineSemaphoreSubmitInfo const & ) const = default;
|
|
#else
|
|
bool operator==( TimelineSemaphoreSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( waitSemaphoreValueCount == rhs.waitSemaphoreValueCount ) &&
|
|
( pWaitSemaphoreValues == rhs.pWaitSemaphoreValues ) && ( signalSemaphoreValueCount == rhs.signalSemaphoreValueCount ) &&
|
|
( pSignalSemaphoreValues == rhs.pSignalSemaphoreValues );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( TimelineSemaphoreSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eTimelineSemaphoreSubmitInfo;
|
|
const void * pNext = {};
|
|
uint32_t waitSemaphoreValueCount = {};
|
|
const uint64_t * pWaitSemaphoreValues = {};
|
|
uint32_t signalSemaphoreValueCount = {};
|
|
const uint64_t * pSignalSemaphoreValues = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eTimelineSemaphoreSubmitInfo>
|
|
{
|
|
using Type = TimelineSemaphoreSubmitInfo;
|
|
};
|
|
|
|
using TimelineSemaphoreSubmitInfoKHR = TimelineSemaphoreSubmitInfo;
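
  // A hedged usage sketch, assuming the default enhanced-mode configuration: the ArrayProxy
  // constructor above pairs one timeline value per signal semaphore, and the struct is chained
  // into a SubmitInfo via pNext. "semaphore", "signalValue" and "queue" are assumed to exist in
  // the calling code:
  //
  //   VULKAN_HPP_NAMESPACE::TimelineSemaphoreSubmitInfo timelineInfo( {}, signalValue );
  //   VULKAN_HPP_NAMESPACE::SubmitInfo submitInfo{};
  //   submitInfo.setSignalSemaphores( semaphore ).setPNext( &timelineInfo );
  //   queue.submit( submitInfo );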
|
|
|
|
struct ValidationFeaturesEXT
|
|
{
|
|
using NativeType = VkValidationFeaturesEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eValidationFeaturesEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ValidationFeaturesEXT( uint32_t enabledValidationFeatureCount_ = {},
|
|
const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT * pEnabledValidationFeatures_ = {},
|
|
uint32_t disabledValidationFeatureCount_ = {},
|
|
const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT * pDisabledValidationFeatures_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, enabledValidationFeatureCount( enabledValidationFeatureCount_ )
|
|
, pEnabledValidationFeatures( pEnabledValidationFeatures_ )
|
|
, disabledValidationFeatureCount( disabledValidationFeatureCount_ )
|
|
, pDisabledValidationFeatures( pDisabledValidationFeatures_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR ValidationFeaturesEXT( ValidationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ValidationFeaturesEXT( VkValidationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ValidationFeaturesEXT( *reinterpret_cast<ValidationFeaturesEXT const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
ValidationFeaturesEXT(
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT> const & enabledValidationFeatures_,
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT> const & disabledValidationFeatures_ = {},
|
|
const void * pNext_ = nullptr )
|
|
: pNext( pNext_ )
|
|
, enabledValidationFeatureCount( static_cast<uint32_t>( enabledValidationFeatures_.size() ) )
|
|
, pEnabledValidationFeatures( enabledValidationFeatures_.data() )
|
|
, disabledValidationFeatureCount( static_cast<uint32_t>( disabledValidationFeatures_.size() ) )
|
|
, pDisabledValidationFeatures( disabledValidationFeatures_.data() )
|
|
{
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
ValidationFeaturesEXT & operator=( ValidationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ValidationFeaturesEXT & operator=( VkValidationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ValidationFeaturesEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ValidationFeaturesEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ValidationFeaturesEXT & setEnabledValidationFeatureCount( uint32_t enabledValidationFeatureCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
enabledValidationFeatureCount = enabledValidationFeatureCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ValidationFeaturesEXT &
|
|
setPEnabledValidationFeatures( const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT * pEnabledValidationFeatures_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pEnabledValidationFeatures = pEnabledValidationFeatures_;
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
ValidationFeaturesEXT & setEnabledValidationFeatures(
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT> const & enabledValidationFeatures_ )
|
|
VULKAN_HPP_NOEXCEPT
|
|
{
|
|
enabledValidationFeatureCount = static_cast<uint32_t>( enabledValidationFeatures_.size() );
|
|
pEnabledValidationFeatures = enabledValidationFeatures_.data();
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ValidationFeaturesEXT & setDisabledValidationFeatureCount( uint32_t disabledValidationFeatureCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
disabledValidationFeatureCount = disabledValidationFeatureCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ValidationFeaturesEXT &
|
|
setPDisabledValidationFeatures( const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT * pDisabledValidationFeatures_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pDisabledValidationFeatures = pDisabledValidationFeatures_;
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
ValidationFeaturesEXT & setDisabledValidationFeatures(
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT> const & disabledValidationFeatures_ )
|
|
VULKAN_HPP_NOEXCEPT
|
|
{
|
|
disabledValidationFeatureCount = static_cast<uint32_t>( disabledValidationFeatures_.size() );
|
|
pDisabledValidationFeatures = disabledValidationFeatures_.data();
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkValidationFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkValidationFeaturesEXT *>( this );
|
|
}
|
|
|
|
operator VkValidationFeaturesEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkValidationFeaturesEXT *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
const void * const &,
|
|
uint32_t const &,
|
|
const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT * const &,
|
|
uint32_t const &,
|
|
const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT * const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, enabledValidationFeatureCount, pEnabledValidationFeatures, disabledValidationFeatureCount, pDisabledValidationFeatures );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( ValidationFeaturesEXT const & ) const = default;
|
|
#else
|
|
bool operator==( ValidationFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( enabledValidationFeatureCount == rhs.enabledValidationFeatureCount ) &&
|
|
( pEnabledValidationFeatures == rhs.pEnabledValidationFeatures ) && ( disabledValidationFeatureCount == rhs.disabledValidationFeatureCount ) &&
|
|
( pDisabledValidationFeatures == rhs.pDisabledValidationFeatures );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( ValidationFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eValidationFeaturesEXT;
|
|
const void * pNext = {};
|
|
uint32_t enabledValidationFeatureCount = {};
|
|
const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT * pEnabledValidationFeatures = {};
|
|
uint32_t disabledValidationFeatureCount = {};
|
|
const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT * pDisabledValidationFeatures = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eValidationFeaturesEXT>
|
|
{
|
|
using Type = ValidationFeaturesEXT;
|
|
};
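
  // A hedged sketch of enabling extra validation through this struct; it assumes enhanced mode,
  // an "instanceCreateInfo" being assembled elsewhere, and the VK_EXT_validation_features
  // extension being available:
  //
  //   VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT enables[] = { VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT::eBestPractices };
  //   VULKAN_HPP_NAMESPACE::ValidationFeaturesEXT validationFeatures( enables );
  //   instanceCreateInfo.setPNext( &validationFeatures );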
|
|
|
|
struct VertexInputAttributeDescription2EXT
|
|
{
|
|
using NativeType = VkVertexInputAttributeDescription2EXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVertexInputAttributeDescription2EXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR VertexInputAttributeDescription2EXT( uint32_t location_ = {},
|
|
uint32_t binding_ = {},
|
|
VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
|
|
uint32_t offset_ = {},
|
|
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, location( location_ )
|
|
, binding( binding_ )
|
|
, format( format_ )
|
|
, offset( offset_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR VertexInputAttributeDescription2EXT( VertexInputAttributeDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
VertexInputAttributeDescription2EXT( VkVertexInputAttributeDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: VertexInputAttributeDescription2EXT( *reinterpret_cast<VertexInputAttributeDescription2EXT const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
VertexInputAttributeDescription2EXT & operator=( VertexInputAttributeDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
VertexInputAttributeDescription2EXT & operator=( VkVertexInputAttributeDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription2EXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription2EXT & setLocation( uint32_t location_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
location = location_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription2EXT & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
binding = binding_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription2EXT & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
format = format_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription2EXT & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
offset = offset_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkVertexInputAttributeDescription2EXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkVertexInputAttributeDescription2EXT *>( this );
|
|
}
|
|
|
|
operator VkVertexInputAttributeDescription2EXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkVertexInputAttributeDescription2EXT *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
void * const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
VULKAN_HPP_NAMESPACE::Format const &,
|
|
uint32_t const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, location, binding, format, offset );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( VertexInputAttributeDescription2EXT const & ) const = default;
|
|
#else
|
|
bool operator==( VertexInputAttributeDescription2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( location == rhs.location ) && ( binding == rhs.binding ) && ( format == rhs.format ) &&
|
|
( offset == rhs.offset );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( VertexInputAttributeDescription2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVertexInputAttributeDescription2EXT;
|
|
void * pNext = {};
|
|
uint32_t location = {};
|
|
uint32_t binding = {};
|
|
VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
|
|
uint32_t offset = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eVertexInputAttributeDescription2EXT>
|
|
{
|
|
using Type = VertexInputAttributeDescription2EXT;
|
|
};
|
|
|
|
struct VertexInputBindingDescription2EXT
|
|
{
|
|
using NativeType = VkVertexInputBindingDescription2EXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVertexInputBindingDescription2EXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR VertexInputBindingDescription2EXT( uint32_t binding_ = {},
|
|
uint32_t stride_ = {},
|
|
VULKAN_HPP_NAMESPACE::VertexInputRate inputRate_ = VULKAN_HPP_NAMESPACE::VertexInputRate::eVertex,
|
|
uint32_t divisor_ = {},
|
|
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, binding( binding_ )
|
|
, stride( stride_ )
|
|
, inputRate( inputRate_ )
|
|
, divisor( divisor_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR VertexInputBindingDescription2EXT( VertexInputBindingDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
VertexInputBindingDescription2EXT( VkVertexInputBindingDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: VertexInputBindingDescription2EXT( *reinterpret_cast<VertexInputBindingDescription2EXT const *>( &rhs ) )
|
|
{
|
|
}
|
|
|
|
VertexInputBindingDescription2EXT & operator=( VertexInputBindingDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
VertexInputBindingDescription2EXT & operator=( VkVertexInputBindingDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription2EXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription2EXT & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
binding = binding_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription2EXT & setStride( uint32_t stride_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
stride = stride_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription2EXT & setInputRate( VULKAN_HPP_NAMESPACE::VertexInputRate inputRate_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
inputRate = inputRate_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription2EXT & setDivisor( uint32_t divisor_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
divisor = divisor_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkVertexInputBindingDescription2EXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkVertexInputBindingDescription2EXT *>( this );
|
|
}
|
|
|
|
operator VkVertexInputBindingDescription2EXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkVertexInputBindingDescription2EXT *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
void * const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
VULKAN_HPP_NAMESPACE::VertexInputRate const &,
|
|
uint32_t const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, binding, stride, inputRate, divisor );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( VertexInputBindingDescription2EXT const & ) const = default;
|
|
#else
|
|
bool operator==( VertexInputBindingDescription2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( binding == rhs.binding ) && ( stride == rhs.stride ) && ( inputRate == rhs.inputRate ) &&
|
|
( divisor == rhs.divisor );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( VertexInputBindingDescription2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVertexInputBindingDescription2EXT;
|
|
void * pNext = {};
|
|
uint32_t binding = {};
|
|
uint32_t stride = {};
|
|
VULKAN_HPP_NAMESPACE::VertexInputRate inputRate = VULKAN_HPP_NAMESPACE::VertexInputRate::eVertex;
|
|
uint32_t divisor = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eVertexInputBindingDescription2EXT>
|
|
{
|
|
using Type = VertexInputBindingDescription2EXT;
|
|
};
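
  // An illustrative sketch of the two VertexInput*2EXT structs feeding CommandBuffer::setVertexInputEXT
  // (VK_EXT_vertex_input_dynamic_state, entry point assumed to be loaded); "Vertex", its "pos" member
  // and "commandBuffer" are assumed to exist in the calling code:
  //
  //   VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT binding( 0, sizeof( Vertex ), VULKAN_HPP_NAMESPACE::VertexInputRate::eVertex, 1 );
  //   VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT attribute( 0, 0, VULKAN_HPP_NAMESPACE::Format::eR32G32B32Sfloat, offsetof( Vertex, pos ) );
  //   commandBuffer.setVertexInputEXT( binding, attribute );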
|
|
|
|
struct WriteDescriptorSet
|
|
{
|
|
using NativeType = VkWriteDescriptorSet;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWriteDescriptorSet;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR WriteDescriptorSet( VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ = {},
|
|
uint32_t dstBinding_ = {},
|
|
uint32_t dstArrayElement_ = {},
|
|
uint32_t descriptorCount_ = {},
|
|
VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler,
|
|
const VULKAN_HPP_NAMESPACE::DescriptorImageInfo * pImageInfo_ = {},
|
|
const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo * pBufferInfo_ = {},
|
|
const VULKAN_HPP_NAMESPACE::BufferView * pTexelBufferView_ = {},
|
|
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
|
|
: pNext( pNext_ )
|
|
, dstSet( dstSet_ )
|
|
, dstBinding( dstBinding_ )
|
|
, dstArrayElement( dstArrayElement_ )
|
|
, descriptorCount( descriptorCount_ )
|
|
, descriptorType( descriptorType_ )
|
|
, pImageInfo( pImageInfo_ )
|
|
, pBufferInfo( pBufferInfo_ )
|
|
, pTexelBufferView( pTexelBufferView_ )
|
|
{
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR WriteDescriptorSet( WriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
WriteDescriptorSet( VkWriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT : WriteDescriptorSet( *reinterpret_cast<WriteDescriptorSet const *>( &rhs ) ) {}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
WriteDescriptorSet( VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_,
|
|
uint32_t dstBinding_,
|
|
uint32_t dstArrayElement_,
|
|
VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_,
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorImageInfo> const & imageInfo_,
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo> const & bufferInfo_ = {},
|
|
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferView> const & texelBufferView_ = {},
|
|
const void * pNext_ = nullptr )
|
|
: pNext( pNext_ )
|
|
, dstSet( dstSet_ )
|
|
, dstBinding( dstBinding_ )
|
|
, dstArrayElement( dstArrayElement_ )
|
|
, descriptorCount( static_cast<uint32_t>( !imageInfo_.empty() ? imageInfo_.size()
|
|
: !bufferInfo_.empty() ? bufferInfo_.size()
|
|
: texelBufferView_.size() ) )
|
|
, descriptorType( descriptorType_ )
|
|
, pImageInfo( imageInfo_.data() )
|
|
, pBufferInfo( bufferInfo_.data() )
|
|
, pTexelBufferView( texelBufferView_.data() )
|
|
{
|
|
# ifdef VULKAN_HPP_NO_EXCEPTIONS
|
|
VULKAN_HPP_ASSERT( ( !imageInfo_.empty() + !bufferInfo_.empty() + !texelBufferView_.empty() ) <= 1 );
|
|
# else
|
|
if ( 1 < ( !imageInfo_.empty() + !bufferInfo_.empty() + !texelBufferView_.empty() ) )
|
|
{
|
|
throw LogicError( VULKAN_HPP_NAMESPACE_STRING
|
|
"::WriteDescriptorSet::WriteDescriptorSet: 1 < ( !imageInfo_.empty() + !bufferInfo_.empty() + !texelBufferView_.empty() )" );
|
|
}
|
|
# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
WriteDescriptorSet & operator=( WriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
WriteDescriptorSet & operator=( VkWriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::WriteDescriptorSet const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & setDstSet( VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dstSet = dstSet_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & setDstBinding( uint32_t dstBinding_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dstBinding = dstBinding_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & setDstArrayElement( uint32_t dstArrayElement_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dstArrayElement = dstArrayElement_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
descriptorCount = descriptorCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
descriptorType = descriptorType_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & setPImageInfo( const VULKAN_HPP_NAMESPACE::DescriptorImageInfo * pImageInfo_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pImageInfo = pImageInfo_;
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
WriteDescriptorSet &
|
|
setImageInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorImageInfo> const & imageInfo_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
descriptorCount = static_cast<uint32_t>( imageInfo_.size() );
|
|
pImageInfo = imageInfo_.data();
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & setPBufferInfo( const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo * pBufferInfo_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pBufferInfo = pBufferInfo_;
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
WriteDescriptorSet &
|
|
setBufferInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo> const & bufferInfo_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
descriptorCount = static_cast<uint32_t>( bufferInfo_.size() );
|
|
pBufferInfo = bufferInfo_.data();
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & setPTexelBufferView( const VULKAN_HPP_NAMESPACE::BufferView * pTexelBufferView_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pTexelBufferView = pTexelBufferView_;
|
|
return *this;
|
|
}
|
|
|
|
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
WriteDescriptorSet &
|
|
setTexelBufferView( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferView> const & texelBufferView_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
descriptorCount = static_cast<uint32_t>( texelBufferView_.size() );
|
|
pTexelBufferView = texelBufferView_.data();
|
|
return *this;
|
|
}
|
|
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
operator VkWriteDescriptorSet const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkWriteDescriptorSet *>( this );
|
|
}
|
|
|
|
operator VkWriteDescriptorSet &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkWriteDescriptorSet *>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
# if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
# else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
|
|
const void * const &,
|
|
VULKAN_HPP_NAMESPACE::DescriptorSet const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
uint32_t const &,
|
|
VULKAN_HPP_NAMESPACE::DescriptorType const &,
|
|
const VULKAN_HPP_NAMESPACE::DescriptorImageInfo * const &,
|
|
const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo * const &,
|
|
const VULKAN_HPP_NAMESPACE::BufferView * const &>
|
|
# endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, dstSet, dstBinding, dstArrayElement, descriptorCount, descriptorType, pImageInfo, pBufferInfo, pTexelBufferView );
|
|
}
|
|
#endif
|
|
|
|
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
|
|
auto operator<=>( WriteDescriptorSet const & ) const = default;
|
|
#else
|
|
bool operator==( WriteDescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
# if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
# else
|
|
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dstSet == rhs.dstSet ) && ( dstBinding == rhs.dstBinding ) &&
|
|
( dstArrayElement == rhs.dstArrayElement ) && ( descriptorCount == rhs.descriptorCount ) && ( descriptorType == rhs.descriptorType ) &&
|
|
( pImageInfo == rhs.pImageInfo ) && ( pBufferInfo == rhs.pBufferInfo ) && ( pTexelBufferView == rhs.pTexelBufferView );
|
|
# endif
|
|
}
|
|
|
|
bool operator!=( WriteDescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWriteDescriptorSet;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::DescriptorSet dstSet = {};
|
|
uint32_t dstBinding = {};
|
|
uint32_t dstArrayElement = {};
|
|
uint32_t descriptorCount = {};
|
|
VULKAN_HPP_NAMESPACE::DescriptorType descriptorType = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler;
|
|
const VULKAN_HPP_NAMESPACE::DescriptorImageInfo * pImageInfo = {};
|
|
const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo * pBufferInfo = {};
|
|
const VULKAN_HPP_NAMESPACE::BufferView * pTexelBufferView = {};
|
|
};
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eWriteDescriptorSet>
|
|
{
|
|
using Type = WriteDescriptorSet;
|
|
};
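
  // A minimal sketch of the enhanced-mode constructor above, which derives descriptorCount from
  // whichever of the three array proxies is non-empty; "descriptorSet", "uniformBuffer",
  // "UniformData" and "device" are assumed to exist in the calling code:
  //
  //   VULKAN_HPP_NAMESPACE::DescriptorBufferInfo bufferInfo( uniformBuffer, 0, sizeof( UniformData ) );
  //   VULKAN_HPP_NAMESPACE::WriteDescriptorSet write( descriptorSet, 0, 0, VULKAN_HPP_NAMESPACE::DescriptorType::eUniformBuffer, {}, bufferInfo );
  //   device.updateDescriptorSets( write, nullptr );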

  struct WriteDescriptorSetInlineUniformBlock
  {
    using NativeType = VkWriteDescriptorSetInlineUniformBlock;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWriteDescriptorSetInlineUniformBlock;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      WriteDescriptorSetInlineUniformBlock( uint32_t dataSize_ = {}, const void * pData_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , dataSize( dataSize_ )
      , pData( pData_ )
    {
    }

    VULKAN_HPP_CONSTEXPR WriteDescriptorSetInlineUniformBlock( WriteDescriptorSetInlineUniformBlock const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    WriteDescriptorSetInlineUniformBlock( VkWriteDescriptorSetInlineUniformBlock const & rhs ) VULKAN_HPP_NOEXCEPT
      : WriteDescriptorSetInlineUniformBlock( *reinterpret_cast<WriteDescriptorSetInlineUniformBlock const *>( &rhs ) )
    {
    }

# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    template <typename T>
    WriteDescriptorSetInlineUniformBlock( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & data_, const void * pNext_ = nullptr )
      : pNext( pNext_ ), dataSize( static_cast<uint32_t>( data_.size() * sizeof( T ) ) ), pData( data_.data() )
    {
    }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    WriteDescriptorSetInlineUniformBlock & operator=( WriteDescriptorSetInlineUniformBlock const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    WriteDescriptorSetInlineUniformBlock & operator=( VkWriteDescriptorSetInlineUniformBlock const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::WriteDescriptorSetInlineUniformBlock const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetInlineUniformBlock & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetInlineUniformBlock & setDataSize( uint32_t dataSize_ ) VULKAN_HPP_NOEXCEPT
    {
      dataSize = dataSize_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetInlineUniformBlock & setPData( const void * pData_ ) VULKAN_HPP_NOEXCEPT
    {
      pData = pData_;
      return *this;
    }

# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    template <typename T>
    WriteDescriptorSetInlineUniformBlock & setData( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & data_ ) VULKAN_HPP_NOEXCEPT
    {
      dataSize = static_cast<uint32_t>( data_.size() * sizeof( T ) );
      pData = data_.data();
      return *this;
    }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkWriteDescriptorSetInlineUniformBlock const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkWriteDescriptorSetInlineUniformBlock *>( this );
    }

    operator VkWriteDescriptorSetInlineUniformBlock &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkWriteDescriptorSetInlineUniformBlock *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const void * const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, dataSize, pData );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( WriteDescriptorSetInlineUniformBlock const & ) const = default;
#else
    bool operator==( WriteDescriptorSetInlineUniformBlock const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
# if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
# else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dataSize == rhs.dataSize ) && ( pData == rhs.pData );
# endif
    }

    bool operator!=( WriteDescriptorSetInlineUniformBlock const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWriteDescriptorSetInlineUniformBlock;
    const void * pNext = {};
    uint32_t dataSize = {};
    const void * pData = {};
  };

  template <>
  struct CppType<StructureType, StructureType::eWriteDescriptorSetInlineUniformBlock>
  {
    using Type = WriteDescriptorSetInlineUniformBlock;
  };

  using WriteDescriptorSetInlineUniformBlockEXT = WriteDescriptorSetInlineUniformBlock;

} // namespace VULKAN_HPP_NAMESPACE
#endif