Vulkan-Hpp/vulkan/vulkan_raii.hpp

// Copyright 2015-2024 The Khronos Group Inc.
//
// SPDX-License-Identifier: Apache-2.0 OR MIT
//
// This header is generated from the Khronos Vulkan XML API Registry.
#ifndef VULKAN_RAII_HPP
#define VULKAN_RAII_HPP
#include <vulkan/vulkan.hpp>
#if !( defined( VULKAN_HPP_ENABLE_STD_MODULE ) && defined( VULKAN_HPP_STD_MODULE ) )
# include <memory> // std::unique_ptr
# include <utility> // std::forward
#endif
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
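// Note: the RAII wrappers build on the enhanced mode of vulkan.hpp, so everything below is
// compiled out when VULKAN_HPP_DISABLE_ENHANCED_MODE is defined.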
namespace VULKAN_HPP_NAMESPACE
{
namespace VULKAN_HPP_RAII_NAMESPACE
{
namespace detail
{
template <class T>
class CreateReturnType
{
public:
# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
using Type = VULKAN_HPP_EXPECTED<T, VULKAN_HPP_NAMESPACE::Result>;
# else
using Type = T;
# endif
};
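// CreateReturnType<T>::Type is the return type used by the RAII creation functions: with
// VULKAN_HPP_RAII_NO_EXCEPTIONS it is VULKAN_HPP_EXPECTED<T, VULKAN_HPP_NAMESPACE::Result>
// (typically std::expected), so creation failures are reported as a value instead of a thrown
// exception; otherwise it is simply T.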
using PFN_dummy = void ( * )();
class ContextDispatcher : public ::VULKAN_HPP_NAMESPACE::detail::DispatchLoaderBase
{
public:
ContextDispatcher( PFN_vkGetInstanceProcAddr getProcAddr )
: vkGetInstanceProcAddr( getProcAddr )
//=== VK_VERSION_1_0 ===
, vkCreateInstance( PFN_vkCreateInstance( getProcAddr( NULL, "vkCreateInstance" ) ) )
, vkEnumerateInstanceExtensionProperties(
PFN_vkEnumerateInstanceExtensionProperties( getProcAddr( NULL, "vkEnumerateInstanceExtensionProperties" ) ) )
, vkEnumerateInstanceLayerProperties( PFN_vkEnumerateInstanceLayerProperties( getProcAddr( NULL, "vkEnumerateInstanceLayerProperties" ) ) )
//=== VK_VERSION_1_1 ===
, vkEnumerateInstanceVersion( PFN_vkEnumerateInstanceVersion( getProcAddr( NULL, "vkEnumerateInstanceVersion" ) ) )
{
}
public:
PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr = 0;
//=== VK_VERSION_1_0 ===
PFN_vkCreateInstance vkCreateInstance = 0;
PFN_vkEnumerateInstanceExtensionProperties vkEnumerateInstanceExtensionProperties = 0;
PFN_vkEnumerateInstanceLayerProperties vkEnumerateInstanceLayerProperties = 0;
//=== VK_VERSION_1_1 ===
PFN_vkEnumerateInstanceVersion vkEnumerateInstanceVersion = 0;
};
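// ContextDispatcher resolves only the global entry points that may be called before a VkInstance
// exists (vkCreateInstance and the vkEnumerateInstance* functions), all through the
// vkGetInstanceProcAddr handed to its constructor. It is not meant to be used directly; with the
// default namespace macros it is owned by vk::raii::Context. A minimal usage sketch, illustrative
// only and not part of the generated header:
//
//   vk::raii::Context context; // loads the Vulkan library and resolves vkGetInstanceProcAddr
//   uint32_t apiVersion = context.enumerateInstanceVersion(); // via vkEnumerateInstanceVersion
//   vk::ApplicationInfo appInfo( "MyApp", 1, "MyEngine", 1, apiVersion );
//   vk::InstanceCreateInfo instanceCreateInfo( {}, &appInfo );
//   vk::raii::Instance instance( context, instanceCreateInfo ); // via vkCreateInstance resolved above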
class InstanceDispatcher : public ::VULKAN_HPP_NAMESPACE::detail::DispatchLoaderBase
{
public:
InstanceDispatcher( PFN_vkGetInstanceProcAddr getProcAddr, VkInstance instance ) : vkGetInstanceProcAddr( getProcAddr )
{
//=== VK_VERSION_1_0 ===
vkDestroyInstance = PFN_vkDestroyInstance( vkGetInstanceProcAddr( instance, "vkDestroyInstance" ) );
vkEnumeratePhysicalDevices = PFN_vkEnumeratePhysicalDevices( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDevices" ) );
vkGetPhysicalDeviceFeatures = PFN_vkGetPhysicalDeviceFeatures( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures" ) );
vkGetPhysicalDeviceFormatProperties =
PFN_vkGetPhysicalDeviceFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties" ) );
vkGetPhysicalDeviceImageFormatProperties =
PFN_vkGetPhysicalDeviceImageFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties" ) );
vkGetPhysicalDeviceProperties = PFN_vkGetPhysicalDeviceProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties" ) );
vkGetPhysicalDeviceQueueFamilyProperties =
PFN_vkGetPhysicalDeviceQueueFamilyProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties" ) );
vkGetPhysicalDeviceMemoryProperties =
PFN_vkGetPhysicalDeviceMemoryProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties" ) );
vkCreateDevice = PFN_vkCreateDevice( vkGetInstanceProcAddr( instance, "vkCreateDevice" ) );
vkEnumerateDeviceExtensionProperties =
PFN_vkEnumerateDeviceExtensionProperties( vkGetInstanceProcAddr( instance, "vkEnumerateDeviceExtensionProperties" ) );
vkEnumerateDeviceLayerProperties = PFN_vkEnumerateDeviceLayerProperties( vkGetInstanceProcAddr( instance, "vkEnumerateDeviceLayerProperties" ) );
vkGetPhysicalDeviceSparseImageFormatProperties =
PFN_vkGetPhysicalDeviceSparseImageFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties" ) );
//=== VK_VERSION_1_1 ===
vkEnumeratePhysicalDeviceGroups = PFN_vkEnumeratePhysicalDeviceGroups( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceGroups" ) );
vkGetPhysicalDeviceFeatures2 = PFN_vkGetPhysicalDeviceFeatures2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures2" ) );
vkGetPhysicalDeviceProperties2 = PFN_vkGetPhysicalDeviceProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties2" ) );
vkGetPhysicalDeviceFormatProperties2 =
PFN_vkGetPhysicalDeviceFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties2" ) );
vkGetPhysicalDeviceImageFormatProperties2 =
PFN_vkGetPhysicalDeviceImageFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties2" ) );
vkGetPhysicalDeviceQueueFamilyProperties2 =
PFN_vkGetPhysicalDeviceQueueFamilyProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties2" ) );
vkGetPhysicalDeviceMemoryProperties2 =
PFN_vkGetPhysicalDeviceMemoryProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties2" ) );
vkGetPhysicalDeviceSparseImageFormatProperties2 =
PFN_vkGetPhysicalDeviceSparseImageFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties2" ) );
vkGetPhysicalDeviceExternalBufferProperties =
PFN_vkGetPhysicalDeviceExternalBufferProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalBufferProperties" ) );
vkGetPhysicalDeviceExternalFenceProperties =
PFN_vkGetPhysicalDeviceExternalFenceProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalFenceProperties" ) );
vkGetPhysicalDeviceExternalSemaphoreProperties =
PFN_vkGetPhysicalDeviceExternalSemaphoreProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalSemaphoreProperties" ) );
//=== VK_VERSION_1_3 ===
vkGetPhysicalDeviceToolProperties = PFN_vkGetPhysicalDeviceToolProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceToolProperties" ) );
//=== VK_KHR_surface ===
vkDestroySurfaceKHR = PFN_vkDestroySurfaceKHR( vkGetInstanceProcAddr( instance, "vkDestroySurfaceKHR" ) );
vkGetPhysicalDeviceSurfaceSupportKHR =
PFN_vkGetPhysicalDeviceSurfaceSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceSupportKHR" ) );
vkGetPhysicalDeviceSurfaceCapabilitiesKHR =
PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilitiesKHR" ) );
vkGetPhysicalDeviceSurfaceFormatsKHR =
PFN_vkGetPhysicalDeviceSurfaceFormatsKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceFormatsKHR" ) );
vkGetPhysicalDeviceSurfacePresentModesKHR =
PFN_vkGetPhysicalDeviceSurfacePresentModesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfacePresentModesKHR" ) );
//=== VK_KHR_swapchain ===
vkGetPhysicalDevicePresentRectanglesKHR =
PFN_vkGetPhysicalDevicePresentRectanglesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDevicePresentRectanglesKHR" ) );
//=== VK_KHR_display ===
vkGetPhysicalDeviceDisplayPropertiesKHR =
PFN_vkGetPhysicalDeviceDisplayPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPropertiesKHR" ) );
vkGetPhysicalDeviceDisplayPlanePropertiesKHR =
PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPlanePropertiesKHR" ) );
vkGetDisplayPlaneSupportedDisplaysKHR =
PFN_vkGetDisplayPlaneSupportedDisplaysKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneSupportedDisplaysKHR" ) );
vkGetDisplayModePropertiesKHR = PFN_vkGetDisplayModePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayModePropertiesKHR" ) );
vkCreateDisplayModeKHR = PFN_vkCreateDisplayModeKHR( vkGetInstanceProcAddr( instance, "vkCreateDisplayModeKHR" ) );
vkGetDisplayPlaneCapabilitiesKHR = PFN_vkGetDisplayPlaneCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneCapabilitiesKHR" ) );
vkCreateDisplayPlaneSurfaceKHR = PFN_vkCreateDisplayPlaneSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateDisplayPlaneSurfaceKHR" ) );
# if defined( VK_USE_PLATFORM_XLIB_KHR )
//=== VK_KHR_xlib_surface ===
vkCreateXlibSurfaceKHR = PFN_vkCreateXlibSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateXlibSurfaceKHR" ) );
vkGetPhysicalDeviceXlibPresentationSupportKHR =
PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceXlibPresentationSupportKHR" ) );
# endif /*VK_USE_PLATFORM_XLIB_KHR*/
# if defined( VK_USE_PLATFORM_XCB_KHR )
//=== VK_KHR_xcb_surface ===
vkCreateXcbSurfaceKHR = PFN_vkCreateXcbSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateXcbSurfaceKHR" ) );
vkGetPhysicalDeviceXcbPresentationSupportKHR =
PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceXcbPresentationSupportKHR" ) );
# endif /*VK_USE_PLATFORM_XCB_KHR*/
# if defined( VK_USE_PLATFORM_WAYLAND_KHR )
//=== VK_KHR_wayland_surface ===
vkCreateWaylandSurfaceKHR = PFN_vkCreateWaylandSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateWaylandSurfaceKHR" ) );
vkGetPhysicalDeviceWaylandPresentationSupportKHR =
PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceWaylandPresentationSupportKHR" ) );
# endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
# if defined( VK_USE_PLATFORM_ANDROID_KHR )
//=== VK_KHR_android_surface ===
vkCreateAndroidSurfaceKHR = PFN_vkCreateAndroidSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateAndroidSurfaceKHR" ) );
# endif /*VK_USE_PLATFORM_ANDROID_KHR*/
# if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_KHR_win32_surface ===
vkCreateWin32SurfaceKHR = PFN_vkCreateWin32SurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateWin32SurfaceKHR" ) );
vkGetPhysicalDeviceWin32PresentationSupportKHR =
PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceWin32PresentationSupportKHR" ) );
# endif /*VK_USE_PLATFORM_WIN32_KHR*/
//=== VK_EXT_debug_report ===
vkCreateDebugReportCallbackEXT = PFN_vkCreateDebugReportCallbackEXT( vkGetInstanceProcAddr( instance, "vkCreateDebugReportCallbackEXT" ) );
vkDestroyDebugReportCallbackEXT = PFN_vkDestroyDebugReportCallbackEXT( vkGetInstanceProcAddr( instance, "vkDestroyDebugReportCallbackEXT" ) );
vkDebugReportMessageEXT = PFN_vkDebugReportMessageEXT( vkGetInstanceProcAddr( instance, "vkDebugReportMessageEXT" ) );
//=== VK_KHR_video_queue ===
vkGetPhysicalDeviceVideoCapabilitiesKHR =
PFN_vkGetPhysicalDeviceVideoCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceVideoCapabilitiesKHR" ) );
vkGetPhysicalDeviceVideoFormatPropertiesKHR =
PFN_vkGetPhysicalDeviceVideoFormatPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceVideoFormatPropertiesKHR" ) );
# if defined( VK_USE_PLATFORM_GGP )
//=== VK_GGP_stream_descriptor_surface ===
vkCreateStreamDescriptorSurfaceGGP =
PFN_vkCreateStreamDescriptorSurfaceGGP( vkGetInstanceProcAddr( instance, "vkCreateStreamDescriptorSurfaceGGP" ) );
# endif /*VK_USE_PLATFORM_GGP*/
//=== VK_NV_external_memory_capabilities ===
vkGetPhysicalDeviceExternalImageFormatPropertiesNV =
PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalImageFormatPropertiesNV" ) );
//=== VK_KHR_get_physical_device_properties2 ===
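// For extensions that were promoted to core, the KHR entry point resolved here also backfills the
// corresponding core pointer when that one could not be resolved (e.g. on a Vulkan 1.0 instance),
// so callers can always dispatch through the core name.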
vkGetPhysicalDeviceFeatures2KHR = PFN_vkGetPhysicalDeviceFeatures2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures2KHR" ) );
if ( !vkGetPhysicalDeviceFeatures2 )
vkGetPhysicalDeviceFeatures2 = vkGetPhysicalDeviceFeatures2KHR;
vkGetPhysicalDeviceProperties2KHR = PFN_vkGetPhysicalDeviceProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties2KHR" ) );
if ( !vkGetPhysicalDeviceProperties2 )
vkGetPhysicalDeviceProperties2 = vkGetPhysicalDeviceProperties2KHR;
vkGetPhysicalDeviceFormatProperties2KHR =
PFN_vkGetPhysicalDeviceFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties2KHR" ) );
if ( !vkGetPhysicalDeviceFormatProperties2 )
vkGetPhysicalDeviceFormatProperties2 = vkGetPhysicalDeviceFormatProperties2KHR;
vkGetPhysicalDeviceImageFormatProperties2KHR =
PFN_vkGetPhysicalDeviceImageFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties2KHR" ) );
if ( !vkGetPhysicalDeviceImageFormatProperties2 )
vkGetPhysicalDeviceImageFormatProperties2 = vkGetPhysicalDeviceImageFormatProperties2KHR;
vkGetPhysicalDeviceQueueFamilyProperties2KHR =
PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties2KHR" ) );
if ( !vkGetPhysicalDeviceQueueFamilyProperties2 )
vkGetPhysicalDeviceQueueFamilyProperties2 = vkGetPhysicalDeviceQueueFamilyProperties2KHR;
vkGetPhysicalDeviceMemoryProperties2KHR =
PFN_vkGetPhysicalDeviceMemoryProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties2KHR" ) );
if ( !vkGetPhysicalDeviceMemoryProperties2 )
vkGetPhysicalDeviceMemoryProperties2 = vkGetPhysicalDeviceMemoryProperties2KHR;
vkGetPhysicalDeviceSparseImageFormatProperties2KHR =
PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties2KHR" ) );
if ( !vkGetPhysicalDeviceSparseImageFormatProperties2 )
vkGetPhysicalDeviceSparseImageFormatProperties2 = vkGetPhysicalDeviceSparseImageFormatProperties2KHR;
# if defined( VK_USE_PLATFORM_VI_NN )
//=== VK_NN_vi_surface ===
vkCreateViSurfaceNN = PFN_vkCreateViSurfaceNN( vkGetInstanceProcAddr( instance, "vkCreateViSurfaceNN" ) );
# endif /*VK_USE_PLATFORM_VI_NN*/
//=== VK_KHR_device_group_creation ===
vkEnumeratePhysicalDeviceGroupsKHR =
PFN_vkEnumeratePhysicalDeviceGroupsKHR( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceGroupsKHR" ) );
if ( !vkEnumeratePhysicalDeviceGroups )
vkEnumeratePhysicalDeviceGroups = vkEnumeratePhysicalDeviceGroupsKHR;
//=== VK_KHR_external_memory_capabilities ===
vkGetPhysicalDeviceExternalBufferPropertiesKHR =
PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalBufferPropertiesKHR" ) );
if ( !vkGetPhysicalDeviceExternalBufferProperties )
vkGetPhysicalDeviceExternalBufferProperties = vkGetPhysicalDeviceExternalBufferPropertiesKHR;
//=== VK_KHR_external_semaphore_capabilities ===
vkGetPhysicalDeviceExternalSemaphorePropertiesKHR =
PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalSemaphorePropertiesKHR" ) );
if ( !vkGetPhysicalDeviceExternalSemaphoreProperties )
vkGetPhysicalDeviceExternalSemaphoreProperties = vkGetPhysicalDeviceExternalSemaphorePropertiesKHR;
//=== VK_EXT_direct_mode_display ===
vkReleaseDisplayEXT = PFN_vkReleaseDisplayEXT( vkGetInstanceProcAddr( instance, "vkReleaseDisplayEXT" ) );
# if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT )
//=== VK_EXT_acquire_xlib_display ===
vkAcquireXlibDisplayEXT = PFN_vkAcquireXlibDisplayEXT( vkGetInstanceProcAddr( instance, "vkAcquireXlibDisplayEXT" ) );
vkGetRandROutputDisplayEXT = PFN_vkGetRandROutputDisplayEXT( vkGetInstanceProcAddr( instance, "vkGetRandROutputDisplayEXT" ) );
# endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
//=== VK_EXT_display_surface_counter ===
vkGetPhysicalDeviceSurfaceCapabilities2EXT =
PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilities2EXT" ) );
//=== VK_KHR_external_fence_capabilities ===
vkGetPhysicalDeviceExternalFencePropertiesKHR =
PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalFencePropertiesKHR" ) );
if ( !vkGetPhysicalDeviceExternalFenceProperties )
vkGetPhysicalDeviceExternalFenceProperties = vkGetPhysicalDeviceExternalFencePropertiesKHR;
//=== VK_KHR_performance_query ===
vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR" ) );
vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR = PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(
vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR" ) );
//=== VK_KHR_get_surface_capabilities2 ===
vkGetPhysicalDeviceSurfaceCapabilities2KHR =
PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilities2KHR" ) );
vkGetPhysicalDeviceSurfaceFormats2KHR =
PFN_vkGetPhysicalDeviceSurfaceFormats2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceFormats2KHR" ) );
//=== VK_KHR_get_display_properties2 ===
vkGetPhysicalDeviceDisplayProperties2KHR =
PFN_vkGetPhysicalDeviceDisplayProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayProperties2KHR" ) );
vkGetPhysicalDeviceDisplayPlaneProperties2KHR =
PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPlaneProperties2KHR" ) );
vkGetDisplayModeProperties2KHR = PFN_vkGetDisplayModeProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetDisplayModeProperties2KHR" ) );
vkGetDisplayPlaneCapabilities2KHR = PFN_vkGetDisplayPlaneCapabilities2KHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneCapabilities2KHR" ) );
# if defined( VK_USE_PLATFORM_IOS_MVK )
//=== VK_MVK_ios_surface ===
vkCreateIOSSurfaceMVK = PFN_vkCreateIOSSurfaceMVK( vkGetInstanceProcAddr( instance, "vkCreateIOSSurfaceMVK" ) );
# endif /*VK_USE_PLATFORM_IOS_MVK*/
# if defined( VK_USE_PLATFORM_MACOS_MVK )
//=== VK_MVK_macos_surface ===
vkCreateMacOSSurfaceMVK = PFN_vkCreateMacOSSurfaceMVK( vkGetInstanceProcAddr( instance, "vkCreateMacOSSurfaceMVK" ) );
# endif /*VK_USE_PLATFORM_MACOS_MVK*/
//=== VK_EXT_debug_utils ===
vkCreateDebugUtilsMessengerEXT = PFN_vkCreateDebugUtilsMessengerEXT( vkGetInstanceProcAddr( instance, "vkCreateDebugUtilsMessengerEXT" ) );
vkDestroyDebugUtilsMessengerEXT = PFN_vkDestroyDebugUtilsMessengerEXT( vkGetInstanceProcAddr( instance, "vkDestroyDebugUtilsMessengerEXT" ) );
vkSubmitDebugUtilsMessageEXT = PFN_vkSubmitDebugUtilsMessageEXT( vkGetInstanceProcAddr( instance, "vkSubmitDebugUtilsMessageEXT" ) );
//=== VK_EXT_sample_locations ===
vkGetPhysicalDeviceMultisamplePropertiesEXT =
PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMultisamplePropertiesEXT" ) );
//=== VK_EXT_calibrated_timestamps ===
vkGetPhysicalDeviceCalibrateableTimeDomainsEXT =
PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCalibrateableTimeDomainsEXT" ) );
if ( !vkGetPhysicalDeviceCalibrateableTimeDomainsKHR )
vkGetPhysicalDeviceCalibrateableTimeDomainsKHR = vkGetPhysicalDeviceCalibrateableTimeDomainsEXT;
# if defined( VK_USE_PLATFORM_FUCHSIA )
//=== VK_FUCHSIA_imagepipe_surface ===
vkCreateImagePipeSurfaceFUCHSIA = PFN_vkCreateImagePipeSurfaceFUCHSIA( vkGetInstanceProcAddr( instance, "vkCreateImagePipeSurfaceFUCHSIA" ) );
# endif /*VK_USE_PLATFORM_FUCHSIA*/
# if defined( VK_USE_PLATFORM_METAL_EXT )
//=== VK_EXT_metal_surface ===
vkCreateMetalSurfaceEXT = PFN_vkCreateMetalSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateMetalSurfaceEXT" ) );
# endif /*VK_USE_PLATFORM_METAL_EXT*/
//=== VK_KHR_fragment_shading_rate ===
vkGetPhysicalDeviceFragmentShadingRatesKHR =
PFN_vkGetPhysicalDeviceFragmentShadingRatesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFragmentShadingRatesKHR" ) );
//=== VK_EXT_tooling_info ===
vkGetPhysicalDeviceToolPropertiesEXT =
PFN_vkGetPhysicalDeviceToolPropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceToolPropertiesEXT" ) );
if ( !vkGetPhysicalDeviceToolProperties )
vkGetPhysicalDeviceToolProperties = vkGetPhysicalDeviceToolPropertiesEXT;
//=== VK_NV_cooperative_matrix ===
vkGetPhysicalDeviceCooperativeMatrixPropertiesNV =
PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCooperativeMatrixPropertiesNV" ) );
//=== VK_NV_coverage_reduction_mode ===
vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV = PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(
vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV" ) );
# if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_EXT_full_screen_exclusive ===
vkGetPhysicalDeviceSurfacePresentModes2EXT =
PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfacePresentModes2EXT" ) );
# endif /*VK_USE_PLATFORM_WIN32_KHR*/
//=== VK_EXT_headless_surface ===
vkCreateHeadlessSurfaceEXT = PFN_vkCreateHeadlessSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateHeadlessSurfaceEXT" ) );
//=== VK_EXT_acquire_drm_display ===
vkAcquireDrmDisplayEXT = PFN_vkAcquireDrmDisplayEXT( vkGetInstanceProcAddr( instance, "vkAcquireDrmDisplayEXT" ) );
vkGetDrmDisplayEXT = PFN_vkGetDrmDisplayEXT( vkGetInstanceProcAddr( instance, "vkGetDrmDisplayEXT" ) );
//=== VK_KHR_video_encode_queue ===
vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR = PFN_vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR(
vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR" ) );
# if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_NV_acquire_winrt_display ===
vkAcquireWinrtDisplayNV = PFN_vkAcquireWinrtDisplayNV( vkGetInstanceProcAddr( instance, "vkAcquireWinrtDisplayNV" ) );
vkGetWinrtDisplayNV = PFN_vkGetWinrtDisplayNV( vkGetInstanceProcAddr( instance, "vkGetWinrtDisplayNV" ) );
# endif /*VK_USE_PLATFORM_WIN32_KHR*/
# if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
//=== VK_EXT_directfb_surface ===
vkCreateDirectFBSurfaceEXT = PFN_vkCreateDirectFBSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateDirectFBSurfaceEXT" ) );
vkGetPhysicalDeviceDirectFBPresentationSupportEXT =
PFN_vkGetPhysicalDeviceDirectFBPresentationSupportEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDirectFBPresentationSupportEXT" ) );
# endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
# if defined( VK_USE_PLATFORM_SCREEN_QNX )
//=== VK_QNX_screen_surface ===
vkCreateScreenSurfaceQNX = PFN_vkCreateScreenSurfaceQNX( vkGetInstanceProcAddr( instance, "vkCreateScreenSurfaceQNX" ) );
vkGetPhysicalDeviceScreenPresentationSupportQNX =
PFN_vkGetPhysicalDeviceScreenPresentationSupportQNX( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceScreenPresentationSupportQNX" ) );
# endif /*VK_USE_PLATFORM_SCREEN_QNX*/
//=== VK_NV_optical_flow ===
vkGetPhysicalDeviceOpticalFlowImageFormatsNV =
PFN_vkGetPhysicalDeviceOpticalFlowImageFormatsNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceOpticalFlowImageFormatsNV" ) );
//=== VK_KHR_cooperative_matrix ===
vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR =
PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR" ) );
//=== VK_KHR_calibrated_timestamps ===
vkGetPhysicalDeviceCalibrateableTimeDomainsKHR =
PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCalibrateableTimeDomainsKHR" ) );
//=== VK_NV_cooperative_matrix2 ===
vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV = PFN_vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV(
vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV" ) );
vkGetDeviceProcAddr = PFN_vkGetDeviceProcAddr( vkGetInstanceProcAddr( instance, "vkGetDeviceProcAddr" ) );
}
public:
//=== VK_VERSION_1_0 ===
PFN_vkDestroyInstance vkDestroyInstance = 0;
PFN_vkEnumeratePhysicalDevices vkEnumeratePhysicalDevices = 0;
PFN_vkGetPhysicalDeviceFeatures vkGetPhysicalDeviceFeatures = 0;
PFN_vkGetPhysicalDeviceFormatProperties vkGetPhysicalDeviceFormatProperties = 0;
PFN_vkGetPhysicalDeviceImageFormatProperties vkGetPhysicalDeviceImageFormatProperties = 0;
PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties = 0;
PFN_vkGetPhysicalDeviceQueueFamilyProperties vkGetPhysicalDeviceQueueFamilyProperties = 0;
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties = 0;
PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr = 0;
PFN_vkCreateDevice vkCreateDevice = 0;
PFN_vkEnumerateDeviceExtensionProperties vkEnumerateDeviceExtensionProperties = 0;
PFN_vkEnumerateDeviceLayerProperties vkEnumerateDeviceLayerProperties = 0;
PFN_vkGetPhysicalDeviceSparseImageFormatProperties vkGetPhysicalDeviceSparseImageFormatProperties = 0;
//=== VK_VERSION_1_1 ===
PFN_vkEnumeratePhysicalDeviceGroups vkEnumeratePhysicalDeviceGroups = 0;
PFN_vkGetPhysicalDeviceFeatures2 vkGetPhysicalDeviceFeatures2 = 0;
PFN_vkGetPhysicalDeviceProperties2 vkGetPhysicalDeviceProperties2 = 0;
PFN_vkGetPhysicalDeviceFormatProperties2 vkGetPhysicalDeviceFormatProperties2 = 0;
PFN_vkGetPhysicalDeviceImageFormatProperties2 vkGetPhysicalDeviceImageFormatProperties2 = 0;
PFN_vkGetPhysicalDeviceQueueFamilyProperties2 vkGetPhysicalDeviceQueueFamilyProperties2 = 0;
PFN_vkGetPhysicalDeviceMemoryProperties2 vkGetPhysicalDeviceMemoryProperties2 = 0;
PFN_vkGetPhysicalDeviceSparseImageFormatProperties2 vkGetPhysicalDeviceSparseImageFormatProperties2 = 0;
PFN_vkGetPhysicalDeviceExternalBufferProperties vkGetPhysicalDeviceExternalBufferProperties = 0;
PFN_vkGetPhysicalDeviceExternalFenceProperties vkGetPhysicalDeviceExternalFenceProperties = 0;
PFN_vkGetPhysicalDeviceExternalSemaphoreProperties vkGetPhysicalDeviceExternalSemaphoreProperties = 0;
//=== VK_VERSION_1_3 ===
PFN_vkGetPhysicalDeviceToolProperties vkGetPhysicalDeviceToolProperties = 0;
//=== VK_KHR_surface ===
PFN_vkDestroySurfaceKHR vkDestroySurfaceKHR = 0;
PFN_vkGetPhysicalDeviceSurfaceSupportKHR vkGetPhysicalDeviceSurfaceSupportKHR = 0;
PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR vkGetPhysicalDeviceSurfaceCapabilitiesKHR = 0;
PFN_vkGetPhysicalDeviceSurfaceFormatsKHR vkGetPhysicalDeviceSurfaceFormatsKHR = 0;
PFN_vkGetPhysicalDeviceSurfacePresentModesKHR vkGetPhysicalDeviceSurfacePresentModesKHR = 0;
//=== VK_KHR_swapchain ===
PFN_vkGetPhysicalDevicePresentRectanglesKHR vkGetPhysicalDevicePresentRectanglesKHR = 0;
//=== VK_KHR_display ===
PFN_vkGetPhysicalDeviceDisplayPropertiesKHR vkGetPhysicalDeviceDisplayPropertiesKHR = 0;
PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR vkGetPhysicalDeviceDisplayPlanePropertiesKHR = 0;
PFN_vkGetDisplayPlaneSupportedDisplaysKHR vkGetDisplayPlaneSupportedDisplaysKHR = 0;
PFN_vkGetDisplayModePropertiesKHR vkGetDisplayModePropertiesKHR = 0;
PFN_vkCreateDisplayModeKHR vkCreateDisplayModeKHR = 0;
PFN_vkGetDisplayPlaneCapabilitiesKHR vkGetDisplayPlaneCapabilitiesKHR = 0;
PFN_vkCreateDisplayPlaneSurfaceKHR vkCreateDisplayPlaneSurfaceKHR = 0;
# if defined( VK_USE_PLATFORM_XLIB_KHR )
//=== VK_KHR_xlib_surface ===
PFN_vkCreateXlibSurfaceKHR vkCreateXlibSurfaceKHR = 0;
PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR vkGetPhysicalDeviceXlibPresentationSupportKHR = 0;
# else
PFN_dummy vkCreateXlibSurfaceKHR_placeholder = 0;
PFN_dummy vkGetPhysicalDeviceXlibPresentationSupportKHR_placeholder = 0;
# endif /*VK_USE_PLATFORM_XLIB_KHR*/
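// When a platform define such as VK_USE_PLATFORM_XLIB_KHR is not set, PFN_dummy placeholders
// stand in for the platform-specific members, keeping the dispatcher's size and layout the same
// in every configuration.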
# if defined( VK_USE_PLATFORM_XCB_KHR )
//=== VK_KHR_xcb_surface ===
PFN_vkCreateXcbSurfaceKHR vkCreateXcbSurfaceKHR = 0;
PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR vkGetPhysicalDeviceXcbPresentationSupportKHR = 0;
# else
PFN_dummy vkCreateXcbSurfaceKHR_placeholder = 0;
PFN_dummy vkGetPhysicalDeviceXcbPresentationSupportKHR_placeholder = 0;
# endif /*VK_USE_PLATFORM_XCB_KHR*/
# if defined( VK_USE_PLATFORM_WAYLAND_KHR )
//=== VK_KHR_wayland_surface ===
PFN_vkCreateWaylandSurfaceKHR vkCreateWaylandSurfaceKHR = 0;
PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR vkGetPhysicalDeviceWaylandPresentationSupportKHR = 0;
# else
PFN_dummy vkCreateWaylandSurfaceKHR_placeholder = 0;
PFN_dummy vkGetPhysicalDeviceWaylandPresentationSupportKHR_placeholder = 0;
# endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
# if defined( VK_USE_PLATFORM_ANDROID_KHR )
//=== VK_KHR_android_surface ===
PFN_vkCreateAndroidSurfaceKHR vkCreateAndroidSurfaceKHR = 0;
# else
PFN_dummy vkCreateAndroidSurfaceKHR_placeholder = 0;
# endif /*VK_USE_PLATFORM_ANDROID_KHR*/
# if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_KHR_win32_surface ===
PFN_vkCreateWin32SurfaceKHR vkCreateWin32SurfaceKHR = 0;
PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR vkGetPhysicalDeviceWin32PresentationSupportKHR = 0;
# else
PFN_dummy vkCreateWin32SurfaceKHR_placeholder = 0;
PFN_dummy vkGetPhysicalDeviceWin32PresentationSupportKHR_placeholder = 0;
# endif /*VK_USE_PLATFORM_WIN32_KHR*/
//=== VK_EXT_debug_report ===
PFN_vkCreateDebugReportCallbackEXT vkCreateDebugReportCallbackEXT = 0;
PFN_vkDestroyDebugReportCallbackEXT vkDestroyDebugReportCallbackEXT = 0;
PFN_vkDebugReportMessageEXT vkDebugReportMessageEXT = 0;
//=== VK_KHR_video_queue ===
PFN_vkGetPhysicalDeviceVideoCapabilitiesKHR vkGetPhysicalDeviceVideoCapabilitiesKHR = 0;
PFN_vkGetPhysicalDeviceVideoFormatPropertiesKHR vkGetPhysicalDeviceVideoFormatPropertiesKHR = 0;
# if defined( VK_USE_PLATFORM_GGP )
//=== VK_GGP_stream_descriptor_surface ===
PFN_vkCreateStreamDescriptorSurfaceGGP vkCreateStreamDescriptorSurfaceGGP = 0;
# else
PFN_dummy vkCreateStreamDescriptorSurfaceGGP_placeholder = 0;
# endif /*VK_USE_PLATFORM_GGP*/
//=== VK_NV_external_memory_capabilities ===
PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV vkGetPhysicalDeviceExternalImageFormatPropertiesNV = 0;
//=== VK_KHR_get_physical_device_properties2 ===
PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR = 0;
PFN_vkGetPhysicalDeviceProperties2KHR vkGetPhysicalDeviceProperties2KHR = 0;
PFN_vkGetPhysicalDeviceFormatProperties2KHR vkGetPhysicalDeviceFormatProperties2KHR = 0;
PFN_vkGetPhysicalDeviceImageFormatProperties2KHR vkGetPhysicalDeviceImageFormatProperties2KHR = 0;
PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR vkGetPhysicalDeviceQueueFamilyProperties2KHR = 0;
PFN_vkGetPhysicalDeviceMemoryProperties2KHR vkGetPhysicalDeviceMemoryProperties2KHR = 0;
PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR vkGetPhysicalDeviceSparseImageFormatProperties2KHR = 0;
# if defined( VK_USE_PLATFORM_VI_NN )
//=== VK_NN_vi_surface ===
PFN_vkCreateViSurfaceNN vkCreateViSurfaceNN = 0;
# else
PFN_dummy vkCreateViSurfaceNN_placeholder = 0;
# endif /*VK_USE_PLATFORM_VI_NN*/
//=== VK_KHR_device_group_creation ===
PFN_vkEnumeratePhysicalDeviceGroupsKHR vkEnumeratePhysicalDeviceGroupsKHR = 0;
//=== VK_KHR_external_memory_capabilities ===
PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR vkGetPhysicalDeviceExternalBufferPropertiesKHR = 0;
//=== VK_KHR_external_semaphore_capabilities ===
PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = 0;
//=== VK_EXT_direct_mode_display ===
PFN_vkReleaseDisplayEXT vkReleaseDisplayEXT = 0;
# if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT )
//=== VK_EXT_acquire_xlib_display ===
PFN_vkAcquireXlibDisplayEXT vkAcquireXlibDisplayEXT = 0;
PFN_vkGetRandROutputDisplayEXT vkGetRandROutputDisplayEXT = 0;
# else
PFN_dummy vkAcquireXlibDisplayEXT_placeholder = 0;
PFN_dummy vkGetRandROutputDisplayEXT_placeholder = 0;
# endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
//=== VK_EXT_display_surface_counter ===
PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT vkGetPhysicalDeviceSurfaceCapabilities2EXT = 0;
//=== VK_KHR_external_fence_capabilities ===
PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR vkGetPhysicalDeviceExternalFencePropertiesKHR = 0;
//=== VK_KHR_performance_query ===
PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = 0;
PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR = 0;
//=== VK_KHR_get_surface_capabilities2 ===
PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR vkGetPhysicalDeviceSurfaceCapabilities2KHR = 0;
PFN_vkGetPhysicalDeviceSurfaceFormats2KHR vkGetPhysicalDeviceSurfaceFormats2KHR = 0;
//=== VK_KHR_get_display_properties2 ===
PFN_vkGetPhysicalDeviceDisplayProperties2KHR vkGetPhysicalDeviceDisplayProperties2KHR = 0;
PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR vkGetPhysicalDeviceDisplayPlaneProperties2KHR = 0;
PFN_vkGetDisplayModeProperties2KHR vkGetDisplayModeProperties2KHR = 0;
PFN_vkGetDisplayPlaneCapabilities2KHR vkGetDisplayPlaneCapabilities2KHR = 0;
# if defined( VK_USE_PLATFORM_IOS_MVK )
//=== VK_MVK_ios_surface ===
PFN_vkCreateIOSSurfaceMVK vkCreateIOSSurfaceMVK = 0;
# else
PFN_dummy vkCreateIOSSurfaceMVK_placeholder = 0;
# endif /*VK_USE_PLATFORM_IOS_MVK*/
# if defined( VK_USE_PLATFORM_MACOS_MVK )
//=== VK_MVK_macos_surface ===
PFN_vkCreateMacOSSurfaceMVK vkCreateMacOSSurfaceMVK = 0;
# else
PFN_dummy vkCreateMacOSSurfaceMVK_placeholder = 0;
# endif /*VK_USE_PLATFORM_MACOS_MVK*/
//=== VK_EXT_debug_utils ===
PFN_vkCreateDebugUtilsMessengerEXT vkCreateDebugUtilsMessengerEXT = 0;
PFN_vkDestroyDebugUtilsMessengerEXT vkDestroyDebugUtilsMessengerEXT = 0;
PFN_vkSubmitDebugUtilsMessageEXT vkSubmitDebugUtilsMessageEXT = 0;
//=== VK_EXT_sample_locations ===
PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT vkGetPhysicalDeviceMultisamplePropertiesEXT = 0;
//=== VK_EXT_calibrated_timestamps ===
PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT vkGetPhysicalDeviceCalibrateableTimeDomainsEXT = 0;
# if defined( VK_USE_PLATFORM_FUCHSIA )
//=== VK_FUCHSIA_imagepipe_surface ===
PFN_vkCreateImagePipeSurfaceFUCHSIA vkCreateImagePipeSurfaceFUCHSIA = 0;
# else
PFN_dummy vkCreateImagePipeSurfaceFUCHSIA_placeholder = 0;
# endif /*VK_USE_PLATFORM_FUCHSIA*/
# if defined( VK_USE_PLATFORM_METAL_EXT )
//=== VK_EXT_metal_surface ===
PFN_vkCreateMetalSurfaceEXT vkCreateMetalSurfaceEXT = 0;
# else
PFN_dummy vkCreateMetalSurfaceEXT_placeholder = 0;
# endif /*VK_USE_PLATFORM_METAL_EXT*/
//=== VK_KHR_fragment_shading_rate ===
PFN_vkGetPhysicalDeviceFragmentShadingRatesKHR vkGetPhysicalDeviceFragmentShadingRatesKHR = 0;
//=== VK_EXT_tooling_info ===
PFN_vkGetPhysicalDeviceToolPropertiesEXT vkGetPhysicalDeviceToolPropertiesEXT = 0;
//=== VK_NV_cooperative_matrix ===
PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV vkGetPhysicalDeviceCooperativeMatrixPropertiesNV = 0;
//=== VK_NV_coverage_reduction_mode ===
PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV = 0;
# if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_EXT_full_screen_exclusive ===
PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT vkGetPhysicalDeviceSurfacePresentModes2EXT = 0;
# else
PFN_dummy vkGetPhysicalDeviceSurfacePresentModes2EXT_placeholder = 0;
# endif /*VK_USE_PLATFORM_WIN32_KHR*/
//=== VK_EXT_headless_surface ===
PFN_vkCreateHeadlessSurfaceEXT vkCreateHeadlessSurfaceEXT = 0;
//=== VK_EXT_acquire_drm_display ===
PFN_vkAcquireDrmDisplayEXT vkAcquireDrmDisplayEXT = 0;
PFN_vkGetDrmDisplayEXT vkGetDrmDisplayEXT = 0;
//=== VK_KHR_video_encode_queue ===
PFN_vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR = 0;
# if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_NV_acquire_winrt_display ===
PFN_vkAcquireWinrtDisplayNV vkAcquireWinrtDisplayNV = 0;
PFN_vkGetWinrtDisplayNV vkGetWinrtDisplayNV = 0;
# else
PFN_dummy vkAcquireWinrtDisplayNV_placeholder = 0;
PFN_dummy vkGetWinrtDisplayNV_placeholder = 0;
# endif /*VK_USE_PLATFORM_WIN32_KHR*/
# if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
//=== VK_EXT_directfb_surface ===
PFN_vkCreateDirectFBSurfaceEXT vkCreateDirectFBSurfaceEXT = 0;
PFN_vkGetPhysicalDeviceDirectFBPresentationSupportEXT vkGetPhysicalDeviceDirectFBPresentationSupportEXT = 0;
# else
PFN_dummy vkCreateDirectFBSurfaceEXT_placeholder = 0;
PFN_dummy vkGetPhysicalDeviceDirectFBPresentationSupportEXT_placeholder = 0;
# endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
# if defined( VK_USE_PLATFORM_SCREEN_QNX )
//=== VK_QNX_screen_surface ===
PFN_vkCreateScreenSurfaceQNX vkCreateScreenSurfaceQNX = 0;
PFN_vkGetPhysicalDeviceScreenPresentationSupportQNX vkGetPhysicalDeviceScreenPresentationSupportQNX = 0;
# else
PFN_dummy vkCreateScreenSurfaceQNX_placeholder = 0;
PFN_dummy vkGetPhysicalDeviceScreenPresentationSupportQNX_placeholder = 0;
# endif /*VK_USE_PLATFORM_SCREEN_QNX*/
//=== VK_NV_optical_flow ===
PFN_vkGetPhysicalDeviceOpticalFlowImageFormatsNV vkGetPhysicalDeviceOpticalFlowImageFormatsNV = 0;
//=== VK_KHR_cooperative_matrix ===
PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR = 0;
//=== VK_KHR_calibrated_timestamps ===
PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsKHR vkGetPhysicalDeviceCalibrateableTimeDomainsKHR = 0;
//=== VK_NV_cooperative_matrix2 ===
PFN_vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV = 0;
PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr = 0;
};
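// InstanceDispatcher resolves all instance-level and physical-device-level entry points through
// vkGetInstanceProcAddr, including vkGetDeviceProcAddr, which the DeviceDispatcher below needs.
// With the default namespace macros it is created and owned by vk::raii::Instance and shared with
// the RAII wrappers created from that instance. A hedged sketch of how it surfaces to user code,
// illustrative only and not part of the generated header:
//
//   vk::raii::PhysicalDevices physicalDevices( instance );      // via vkEnumeratePhysicalDevices
//   auto properties = physicalDevices.front().getProperties();  // via vkGetPhysicalDeviceProperties
//   auto const * dispatcher = instance.getDispatcher();         // the detail::InstanceDispatcher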
class DeviceDispatcher : public ::VULKAN_HPP_NAMESPACE::detail::DispatchLoaderBase
{
public:
DeviceDispatcher( PFN_vkGetDeviceProcAddr getProcAddr, VkDevice device ) : vkGetDeviceProcAddr( getProcAddr )
{
//=== VK_VERSION_1_0 ===
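// The vkGetDeviceProcAddr member is initialized from the constructor argument and immediately
// re-resolved through itself on the next line, so the remaining loads go through the device-level
// entry point.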
vkGetDeviceProcAddr = PFN_vkGetDeviceProcAddr( vkGetDeviceProcAddr( device, "vkGetDeviceProcAddr" ) );
vkDestroyDevice = PFN_vkDestroyDevice( vkGetDeviceProcAddr( device, "vkDestroyDevice" ) );
vkGetDeviceQueue = PFN_vkGetDeviceQueue( vkGetDeviceProcAddr( device, "vkGetDeviceQueue" ) );
vkQueueSubmit = PFN_vkQueueSubmit( vkGetDeviceProcAddr( device, "vkQueueSubmit" ) );
vkQueueWaitIdle = PFN_vkQueueWaitIdle( vkGetDeviceProcAddr( device, "vkQueueWaitIdle" ) );
vkDeviceWaitIdle = PFN_vkDeviceWaitIdle( vkGetDeviceProcAddr( device, "vkDeviceWaitIdle" ) );
vkAllocateMemory = PFN_vkAllocateMemory( vkGetDeviceProcAddr( device, "vkAllocateMemory" ) );
vkFreeMemory = PFN_vkFreeMemory( vkGetDeviceProcAddr( device, "vkFreeMemory" ) );
vkMapMemory = PFN_vkMapMemory( vkGetDeviceProcAddr( device, "vkMapMemory" ) );
vkUnmapMemory = PFN_vkUnmapMemory( vkGetDeviceProcAddr( device, "vkUnmapMemory" ) );
vkFlushMappedMemoryRanges = PFN_vkFlushMappedMemoryRanges( vkGetDeviceProcAddr( device, "vkFlushMappedMemoryRanges" ) );
vkInvalidateMappedMemoryRanges = PFN_vkInvalidateMappedMemoryRanges( vkGetDeviceProcAddr( device, "vkInvalidateMappedMemoryRanges" ) );
vkGetDeviceMemoryCommitment = PFN_vkGetDeviceMemoryCommitment( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryCommitment" ) );
vkBindBufferMemory = PFN_vkBindBufferMemory( vkGetDeviceProcAddr( device, "vkBindBufferMemory" ) );
vkBindImageMemory = PFN_vkBindImageMemory( vkGetDeviceProcAddr( device, "vkBindImageMemory" ) );
vkGetBufferMemoryRequirements = PFN_vkGetBufferMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements" ) );
vkGetImageMemoryRequirements = PFN_vkGetImageMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements" ) );
vkGetImageSparseMemoryRequirements = PFN_vkGetImageSparseMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements" ) );
vkQueueBindSparse = PFN_vkQueueBindSparse( vkGetDeviceProcAddr( device, "vkQueueBindSparse" ) );
vkCreateFence = PFN_vkCreateFence( vkGetDeviceProcAddr( device, "vkCreateFence" ) );
vkDestroyFence = PFN_vkDestroyFence( vkGetDeviceProcAddr( device, "vkDestroyFence" ) );
vkResetFences = PFN_vkResetFences( vkGetDeviceProcAddr( device, "vkResetFences" ) );
vkGetFenceStatus = PFN_vkGetFenceStatus( vkGetDeviceProcAddr( device, "vkGetFenceStatus" ) );
vkWaitForFences = PFN_vkWaitForFences( vkGetDeviceProcAddr( device, "vkWaitForFences" ) );
vkCreateSemaphore = PFN_vkCreateSemaphore( vkGetDeviceProcAddr( device, "vkCreateSemaphore" ) );
vkDestroySemaphore = PFN_vkDestroySemaphore( vkGetDeviceProcAddr( device, "vkDestroySemaphore" ) );
vkCreateEvent = PFN_vkCreateEvent( vkGetDeviceProcAddr( device, "vkCreateEvent" ) );
vkDestroyEvent = PFN_vkDestroyEvent( vkGetDeviceProcAddr( device, "vkDestroyEvent" ) );
vkGetEventStatus = PFN_vkGetEventStatus( vkGetDeviceProcAddr( device, "vkGetEventStatus" ) );
vkSetEvent = PFN_vkSetEvent( vkGetDeviceProcAddr( device, "vkSetEvent" ) );
vkResetEvent = PFN_vkResetEvent( vkGetDeviceProcAddr( device, "vkResetEvent" ) );
vkCreateQueryPool = PFN_vkCreateQueryPool( vkGetDeviceProcAddr( device, "vkCreateQueryPool" ) );
vkDestroyQueryPool = PFN_vkDestroyQueryPool( vkGetDeviceProcAddr( device, "vkDestroyQueryPool" ) );
vkGetQueryPoolResults = PFN_vkGetQueryPoolResults( vkGetDeviceProcAddr( device, "vkGetQueryPoolResults" ) );
vkCreateBuffer = PFN_vkCreateBuffer( vkGetDeviceProcAddr( device, "vkCreateBuffer" ) );
vkDestroyBuffer = PFN_vkDestroyBuffer( vkGetDeviceProcAddr( device, "vkDestroyBuffer" ) );
vkCreateBufferView = PFN_vkCreateBufferView( vkGetDeviceProcAddr( device, "vkCreateBufferView" ) );
vkDestroyBufferView = PFN_vkDestroyBufferView( vkGetDeviceProcAddr( device, "vkDestroyBufferView" ) );
vkCreateImage = PFN_vkCreateImage( vkGetDeviceProcAddr( device, "vkCreateImage" ) );
vkDestroyImage = PFN_vkDestroyImage( vkGetDeviceProcAddr( device, "vkDestroyImage" ) );
vkGetImageSubresourceLayout = PFN_vkGetImageSubresourceLayout( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout" ) );
vkCreateImageView = PFN_vkCreateImageView( vkGetDeviceProcAddr( device, "vkCreateImageView" ) );
vkDestroyImageView = PFN_vkDestroyImageView( vkGetDeviceProcAddr( device, "vkDestroyImageView" ) );
vkCreateShaderModule = PFN_vkCreateShaderModule( vkGetDeviceProcAddr( device, "vkCreateShaderModule" ) );
vkDestroyShaderModule = PFN_vkDestroyShaderModule( vkGetDeviceProcAddr( device, "vkDestroyShaderModule" ) );
vkCreatePipelineCache = PFN_vkCreatePipelineCache( vkGetDeviceProcAddr( device, "vkCreatePipelineCache" ) );
vkDestroyPipelineCache = PFN_vkDestroyPipelineCache( vkGetDeviceProcAddr( device, "vkDestroyPipelineCache" ) );
vkGetPipelineCacheData = PFN_vkGetPipelineCacheData( vkGetDeviceProcAddr( device, "vkGetPipelineCacheData" ) );
vkMergePipelineCaches = PFN_vkMergePipelineCaches( vkGetDeviceProcAddr( device, "vkMergePipelineCaches" ) );
vkCreateGraphicsPipelines = PFN_vkCreateGraphicsPipelines( vkGetDeviceProcAddr( device, "vkCreateGraphicsPipelines" ) );
vkCreateComputePipelines = PFN_vkCreateComputePipelines( vkGetDeviceProcAddr( device, "vkCreateComputePipelines" ) );
vkDestroyPipeline = PFN_vkDestroyPipeline( vkGetDeviceProcAddr( device, "vkDestroyPipeline" ) );
vkCreatePipelineLayout = PFN_vkCreatePipelineLayout( vkGetDeviceProcAddr( device, "vkCreatePipelineLayout" ) );
vkDestroyPipelineLayout = PFN_vkDestroyPipelineLayout( vkGetDeviceProcAddr( device, "vkDestroyPipelineLayout" ) );
vkCreateSampler = PFN_vkCreateSampler( vkGetDeviceProcAddr( device, "vkCreateSampler" ) );
vkDestroySampler = PFN_vkDestroySampler( vkGetDeviceProcAddr( device, "vkDestroySampler" ) );
vkCreateDescriptorSetLayout = PFN_vkCreateDescriptorSetLayout( vkGetDeviceProcAddr( device, "vkCreateDescriptorSetLayout" ) );
vkDestroyDescriptorSetLayout = PFN_vkDestroyDescriptorSetLayout( vkGetDeviceProcAddr( device, "vkDestroyDescriptorSetLayout" ) );
vkCreateDescriptorPool = PFN_vkCreateDescriptorPool( vkGetDeviceProcAddr( device, "vkCreateDescriptorPool" ) );
vkDestroyDescriptorPool = PFN_vkDestroyDescriptorPool( vkGetDeviceProcAddr( device, "vkDestroyDescriptorPool" ) );
vkResetDescriptorPool = PFN_vkResetDescriptorPool( vkGetDeviceProcAddr( device, "vkResetDescriptorPool" ) );
vkAllocateDescriptorSets = PFN_vkAllocateDescriptorSets( vkGetDeviceProcAddr( device, "vkAllocateDescriptorSets" ) );
vkFreeDescriptorSets = PFN_vkFreeDescriptorSets( vkGetDeviceProcAddr( device, "vkFreeDescriptorSets" ) );
vkUpdateDescriptorSets = PFN_vkUpdateDescriptorSets( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSets" ) );
vkCreateFramebuffer = PFN_vkCreateFramebuffer( vkGetDeviceProcAddr( device, "vkCreateFramebuffer" ) );
vkDestroyFramebuffer = PFN_vkDestroyFramebuffer( vkGetDeviceProcAddr( device, "vkDestroyFramebuffer" ) );
vkCreateRenderPass = PFN_vkCreateRenderPass( vkGetDeviceProcAddr( device, "vkCreateRenderPass" ) );
vkDestroyRenderPass = PFN_vkDestroyRenderPass( vkGetDeviceProcAddr( device, "vkDestroyRenderPass" ) );
vkGetRenderAreaGranularity = PFN_vkGetRenderAreaGranularity( vkGetDeviceProcAddr( device, "vkGetRenderAreaGranularity" ) );
vkCreateCommandPool = PFN_vkCreateCommandPool( vkGetDeviceProcAddr( device, "vkCreateCommandPool" ) );
vkDestroyCommandPool = PFN_vkDestroyCommandPool( vkGetDeviceProcAddr( device, "vkDestroyCommandPool" ) );
vkResetCommandPool = PFN_vkResetCommandPool( vkGetDeviceProcAddr( device, "vkResetCommandPool" ) );
vkAllocateCommandBuffers = PFN_vkAllocateCommandBuffers( vkGetDeviceProcAddr( device, "vkAllocateCommandBuffers" ) );
vkFreeCommandBuffers = PFN_vkFreeCommandBuffers( vkGetDeviceProcAddr( device, "vkFreeCommandBuffers" ) );
vkBeginCommandBuffer = PFN_vkBeginCommandBuffer( vkGetDeviceProcAddr( device, "vkBeginCommandBuffer" ) );
vkEndCommandBuffer = PFN_vkEndCommandBuffer( vkGetDeviceProcAddr( device, "vkEndCommandBuffer" ) );
vkResetCommandBuffer = PFN_vkResetCommandBuffer( vkGetDeviceProcAddr( device, "vkResetCommandBuffer" ) );
vkCmdBindPipeline = PFN_vkCmdBindPipeline( vkGetDeviceProcAddr( device, "vkCmdBindPipeline" ) );
vkCmdSetViewport = PFN_vkCmdSetViewport( vkGetDeviceProcAddr( device, "vkCmdSetViewport" ) );
vkCmdSetScissor = PFN_vkCmdSetScissor( vkGetDeviceProcAddr( device, "vkCmdSetScissor" ) );
vkCmdSetLineWidth = PFN_vkCmdSetLineWidth( vkGetDeviceProcAddr( device, "vkCmdSetLineWidth" ) );
vkCmdSetDepthBias = PFN_vkCmdSetDepthBias( vkGetDeviceProcAddr( device, "vkCmdSetDepthBias" ) );
vkCmdSetBlendConstants = PFN_vkCmdSetBlendConstants( vkGetDeviceProcAddr( device, "vkCmdSetBlendConstants" ) );
vkCmdSetDepthBounds = PFN_vkCmdSetDepthBounds( vkGetDeviceProcAddr( device, "vkCmdSetDepthBounds" ) );
vkCmdSetStencilCompareMask = PFN_vkCmdSetStencilCompareMask( vkGetDeviceProcAddr( device, "vkCmdSetStencilCompareMask" ) );
vkCmdSetStencilWriteMask = PFN_vkCmdSetStencilWriteMask( vkGetDeviceProcAddr( device, "vkCmdSetStencilWriteMask" ) );
vkCmdSetStencilReference = PFN_vkCmdSetStencilReference( vkGetDeviceProcAddr( device, "vkCmdSetStencilReference" ) );
vkCmdBindDescriptorSets = PFN_vkCmdBindDescriptorSets( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorSets" ) );
vkCmdBindIndexBuffer = PFN_vkCmdBindIndexBuffer( vkGetDeviceProcAddr( device, "vkCmdBindIndexBuffer" ) );
vkCmdBindVertexBuffers = PFN_vkCmdBindVertexBuffers( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers" ) );
vkCmdDraw = PFN_vkCmdDraw( vkGetDeviceProcAddr( device, "vkCmdDraw" ) );
vkCmdDrawIndexed = PFN_vkCmdDrawIndexed( vkGetDeviceProcAddr( device, "vkCmdDrawIndexed" ) );
vkCmdDrawIndirect = PFN_vkCmdDrawIndirect( vkGetDeviceProcAddr( device, "vkCmdDrawIndirect" ) );
vkCmdDrawIndexedIndirect = PFN_vkCmdDrawIndexedIndirect( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirect" ) );
vkCmdDispatch = PFN_vkCmdDispatch( vkGetDeviceProcAddr( device, "vkCmdDispatch" ) );
vkCmdDispatchIndirect = PFN_vkCmdDispatchIndirect( vkGetDeviceProcAddr( device, "vkCmdDispatchIndirect" ) );
vkCmdCopyBuffer = PFN_vkCmdCopyBuffer( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer" ) );
vkCmdCopyImage = PFN_vkCmdCopyImage( vkGetDeviceProcAddr( device, "vkCmdCopyImage" ) );
vkCmdBlitImage = PFN_vkCmdBlitImage( vkGetDeviceProcAddr( device, "vkCmdBlitImage" ) );
vkCmdCopyBufferToImage = PFN_vkCmdCopyBufferToImage( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage" ) );
vkCmdCopyImageToBuffer = PFN_vkCmdCopyImageToBuffer( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer" ) );
vkCmdUpdateBuffer = PFN_vkCmdUpdateBuffer( vkGetDeviceProcAddr( device, "vkCmdUpdateBuffer" ) );
vkCmdFillBuffer = PFN_vkCmdFillBuffer( vkGetDeviceProcAddr( device, "vkCmdFillBuffer" ) );
vkCmdClearColorImage = PFN_vkCmdClearColorImage( vkGetDeviceProcAddr( device, "vkCmdClearColorImage" ) );
vkCmdClearDepthStencilImage = PFN_vkCmdClearDepthStencilImage( vkGetDeviceProcAddr( device, "vkCmdClearDepthStencilImage" ) );
vkCmdClearAttachments = PFN_vkCmdClearAttachments( vkGetDeviceProcAddr( device, "vkCmdClearAttachments" ) );
vkCmdResolveImage = PFN_vkCmdResolveImage( vkGetDeviceProcAddr( device, "vkCmdResolveImage" ) );
vkCmdSetEvent = PFN_vkCmdSetEvent( vkGetDeviceProcAddr( device, "vkCmdSetEvent" ) );
vkCmdResetEvent = PFN_vkCmdResetEvent( vkGetDeviceProcAddr( device, "vkCmdResetEvent" ) );
vkCmdWaitEvents = PFN_vkCmdWaitEvents( vkGetDeviceProcAddr( device, "vkCmdWaitEvents" ) );
vkCmdPipelineBarrier = PFN_vkCmdPipelineBarrier( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier" ) );
vkCmdBeginQuery = PFN_vkCmdBeginQuery( vkGetDeviceProcAddr( device, "vkCmdBeginQuery" ) );
vkCmdEndQuery = PFN_vkCmdEndQuery( vkGetDeviceProcAddr( device, "vkCmdEndQuery" ) );
vkCmdResetQueryPool = PFN_vkCmdResetQueryPool( vkGetDeviceProcAddr( device, "vkCmdResetQueryPool" ) );
vkCmdWriteTimestamp = PFN_vkCmdWriteTimestamp( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp" ) );
vkCmdCopyQueryPoolResults = PFN_vkCmdCopyQueryPoolResults( vkGetDeviceProcAddr( device, "vkCmdCopyQueryPoolResults" ) );
vkCmdPushConstants = PFN_vkCmdPushConstants( vkGetDeviceProcAddr( device, "vkCmdPushConstants" ) );
vkCmdBeginRenderPass = PFN_vkCmdBeginRenderPass( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass" ) );
vkCmdNextSubpass = PFN_vkCmdNextSubpass( vkGetDeviceProcAddr( device, "vkCmdNextSubpass" ) );
vkCmdEndRenderPass = PFN_vkCmdEndRenderPass( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass" ) );
vkCmdExecuteCommands = PFN_vkCmdExecuteCommands( vkGetDeviceProcAddr( device, "vkCmdExecuteCommands" ) );
//=== VK_VERSION_1_1 ===
vkBindBufferMemory2 = PFN_vkBindBufferMemory2( vkGetDeviceProcAddr( device, "vkBindBufferMemory2" ) );
vkBindImageMemory2 = PFN_vkBindImageMemory2( vkGetDeviceProcAddr( device, "vkBindImageMemory2" ) );
vkGetDeviceGroupPeerMemoryFeatures = PFN_vkGetDeviceGroupPeerMemoryFeatures( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPeerMemoryFeatures" ) );
vkCmdSetDeviceMask = PFN_vkCmdSetDeviceMask( vkGetDeviceProcAddr( device, "vkCmdSetDeviceMask" ) );
vkCmdDispatchBase = PFN_vkCmdDispatchBase( vkGetDeviceProcAddr( device, "vkCmdDispatchBase" ) );
vkGetImageMemoryRequirements2 = PFN_vkGetImageMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements2" ) );
vkGetBufferMemoryRequirements2 = PFN_vkGetBufferMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements2" ) );
vkGetImageSparseMemoryRequirements2 = PFN_vkGetImageSparseMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements2" ) );
vkTrimCommandPool = PFN_vkTrimCommandPool( vkGetDeviceProcAddr( device, "vkTrimCommandPool" ) );
vkGetDeviceQueue2 = PFN_vkGetDeviceQueue2( vkGetDeviceProcAddr( device, "vkGetDeviceQueue2" ) );
vkCreateSamplerYcbcrConversion = PFN_vkCreateSamplerYcbcrConversion( vkGetDeviceProcAddr( device, "vkCreateSamplerYcbcrConversion" ) );
vkDestroySamplerYcbcrConversion = PFN_vkDestroySamplerYcbcrConversion( vkGetDeviceProcAddr( device, "vkDestroySamplerYcbcrConversion" ) );
vkCreateDescriptorUpdateTemplate = PFN_vkCreateDescriptorUpdateTemplate( vkGetDeviceProcAddr( device, "vkCreateDescriptorUpdateTemplate" ) );
vkDestroyDescriptorUpdateTemplate = PFN_vkDestroyDescriptorUpdateTemplate( vkGetDeviceProcAddr( device, "vkDestroyDescriptorUpdateTemplate" ) );
vkUpdateDescriptorSetWithTemplate = PFN_vkUpdateDescriptorSetWithTemplate( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSetWithTemplate" ) );
vkGetDescriptorSetLayoutSupport = PFN_vkGetDescriptorSetLayoutSupport( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSupport" ) );
//=== VK_VERSION_1_2 ===
vkCmdDrawIndirectCount = PFN_vkCmdDrawIndirectCount( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCount" ) );
vkCmdDrawIndexedIndirectCount = PFN_vkCmdDrawIndexedIndirectCount( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCount" ) );
vkCreateRenderPass2 = PFN_vkCreateRenderPass2( vkGetDeviceProcAddr( device, "vkCreateRenderPass2" ) );
vkCmdBeginRenderPass2 = PFN_vkCmdBeginRenderPass2( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass2" ) );
vkCmdNextSubpass2 = PFN_vkCmdNextSubpass2( vkGetDeviceProcAddr( device, "vkCmdNextSubpass2" ) );
vkCmdEndRenderPass2 = PFN_vkCmdEndRenderPass2( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass2" ) );
vkResetQueryPool = PFN_vkResetQueryPool( vkGetDeviceProcAddr( device, "vkResetQueryPool" ) );
vkGetSemaphoreCounterValue = PFN_vkGetSemaphoreCounterValue( vkGetDeviceProcAddr( device, "vkGetSemaphoreCounterValue" ) );
vkWaitSemaphores = PFN_vkWaitSemaphores( vkGetDeviceProcAddr( device, "vkWaitSemaphores" ) );
vkSignalSemaphore = PFN_vkSignalSemaphore( vkGetDeviceProcAddr( device, "vkSignalSemaphore" ) );
vkGetBufferDeviceAddress = PFN_vkGetBufferDeviceAddress( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddress" ) );
vkGetBufferOpaqueCaptureAddress = PFN_vkGetBufferOpaqueCaptureAddress( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureAddress" ) );
vkGetDeviceMemoryOpaqueCaptureAddress =
PFN_vkGetDeviceMemoryOpaqueCaptureAddress( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryOpaqueCaptureAddress" ) );
//=== VK_VERSION_1_3 ===
vkCreatePrivateDataSlot = PFN_vkCreatePrivateDataSlot( vkGetDeviceProcAddr( device, "vkCreatePrivateDataSlot" ) );
vkDestroyPrivateDataSlot = PFN_vkDestroyPrivateDataSlot( vkGetDeviceProcAddr( device, "vkDestroyPrivateDataSlot" ) );
vkSetPrivateData = PFN_vkSetPrivateData( vkGetDeviceProcAddr( device, "vkSetPrivateData" ) );
vkGetPrivateData = PFN_vkGetPrivateData( vkGetDeviceProcAddr( device, "vkGetPrivateData" ) );
vkCmdSetEvent2 = PFN_vkCmdSetEvent2( vkGetDeviceProcAddr( device, "vkCmdSetEvent2" ) );
vkCmdResetEvent2 = PFN_vkCmdResetEvent2( vkGetDeviceProcAddr( device, "vkCmdResetEvent2" ) );
vkCmdWaitEvents2 = PFN_vkCmdWaitEvents2( vkGetDeviceProcAddr( device, "vkCmdWaitEvents2" ) );
vkCmdPipelineBarrier2 = PFN_vkCmdPipelineBarrier2( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier2" ) );
vkCmdWriteTimestamp2 = PFN_vkCmdWriteTimestamp2( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp2" ) );
vkQueueSubmit2 = PFN_vkQueueSubmit2( vkGetDeviceProcAddr( device, "vkQueueSubmit2" ) );
vkCmdCopyBuffer2 = PFN_vkCmdCopyBuffer2( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer2" ) );
vkCmdCopyImage2 = PFN_vkCmdCopyImage2( vkGetDeviceProcAddr( device, "vkCmdCopyImage2" ) );
vkCmdCopyBufferToImage2 = PFN_vkCmdCopyBufferToImage2( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage2" ) );
vkCmdCopyImageToBuffer2 = PFN_vkCmdCopyImageToBuffer2( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer2" ) );
vkCmdBlitImage2 = PFN_vkCmdBlitImage2( vkGetDeviceProcAddr( device, "vkCmdBlitImage2" ) );
vkCmdResolveImage2 = PFN_vkCmdResolveImage2( vkGetDeviceProcAddr( device, "vkCmdResolveImage2" ) );
vkCmdBeginRendering = PFN_vkCmdBeginRendering( vkGetDeviceProcAddr( device, "vkCmdBeginRendering" ) );
vkCmdEndRendering = PFN_vkCmdEndRendering( vkGetDeviceProcAddr( device, "vkCmdEndRendering" ) );
vkCmdSetCullMode = PFN_vkCmdSetCullMode( vkGetDeviceProcAddr( device, "vkCmdSetCullMode" ) );
vkCmdSetFrontFace = PFN_vkCmdSetFrontFace( vkGetDeviceProcAddr( device, "vkCmdSetFrontFace" ) );
vkCmdSetPrimitiveTopology = PFN_vkCmdSetPrimitiveTopology( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveTopology" ) );
vkCmdSetViewportWithCount = PFN_vkCmdSetViewportWithCount( vkGetDeviceProcAddr( device, "vkCmdSetViewportWithCount" ) );
vkCmdSetScissorWithCount = PFN_vkCmdSetScissorWithCount( vkGetDeviceProcAddr( device, "vkCmdSetScissorWithCount" ) );
vkCmdBindVertexBuffers2 = PFN_vkCmdBindVertexBuffers2( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers2" ) );
vkCmdSetDepthTestEnable = PFN_vkCmdSetDepthTestEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthTestEnable" ) );
vkCmdSetDepthWriteEnable = PFN_vkCmdSetDepthWriteEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthWriteEnable" ) );
vkCmdSetDepthCompareOp = PFN_vkCmdSetDepthCompareOp( vkGetDeviceProcAddr( device, "vkCmdSetDepthCompareOp" ) );
vkCmdSetDepthBoundsTestEnable = PFN_vkCmdSetDepthBoundsTestEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthBoundsTestEnable" ) );
vkCmdSetStencilTestEnable = PFN_vkCmdSetStencilTestEnable( vkGetDeviceProcAddr( device, "vkCmdSetStencilTestEnable" ) );
vkCmdSetStencilOp = PFN_vkCmdSetStencilOp( vkGetDeviceProcAddr( device, "vkCmdSetStencilOp" ) );
vkCmdSetRasterizerDiscardEnable = PFN_vkCmdSetRasterizerDiscardEnable( vkGetDeviceProcAddr( device, "vkCmdSetRasterizerDiscardEnable" ) );
vkCmdSetDepthBiasEnable = PFN_vkCmdSetDepthBiasEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthBiasEnable" ) );
vkCmdSetPrimitiveRestartEnable = PFN_vkCmdSetPrimitiveRestartEnable( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveRestartEnable" ) );
vkGetDeviceBufferMemoryRequirements = PFN_vkGetDeviceBufferMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetDeviceBufferMemoryRequirements" ) );
vkGetDeviceImageMemoryRequirements = PFN_vkGetDeviceImageMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetDeviceImageMemoryRequirements" ) );
vkGetDeviceImageSparseMemoryRequirements =
PFN_vkGetDeviceImageSparseMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetDeviceImageSparseMemoryRequirements" ) );
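        // The core entry points above (VK_VERSION_1_1 .. VK_VERSION_1_3) are queried unconditionally; on
        // implementations that do not provide them, the pointers are simply left null here and, where a promoting
        // extension is available, are backfilled by the extension blocks that follow.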
//=== VK_KHR_swapchain ===
vkCreateSwapchainKHR = PFN_vkCreateSwapchainKHR( vkGetDeviceProcAddr( device, "vkCreateSwapchainKHR" ) );
vkDestroySwapchainKHR = PFN_vkDestroySwapchainKHR( vkGetDeviceProcAddr( device, "vkDestroySwapchainKHR" ) );
vkGetSwapchainImagesKHR = PFN_vkGetSwapchainImagesKHR( vkGetDeviceProcAddr( device, "vkGetSwapchainImagesKHR" ) );
vkAcquireNextImageKHR = PFN_vkAcquireNextImageKHR( vkGetDeviceProcAddr( device, "vkAcquireNextImageKHR" ) );
vkQueuePresentKHR = PFN_vkQueuePresentKHR( vkGetDeviceProcAddr( device, "vkQueuePresentKHR" ) );
vkGetDeviceGroupPresentCapabilitiesKHR =
PFN_vkGetDeviceGroupPresentCapabilitiesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPresentCapabilitiesKHR" ) );
vkGetDeviceGroupSurfacePresentModesKHR =
PFN_vkGetDeviceGroupSurfacePresentModesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupSurfacePresentModesKHR" ) );
vkAcquireNextImage2KHR = PFN_vkAcquireNextImage2KHR( vkGetDeviceProcAddr( device, "vkAcquireNextImage2KHR" ) );
//=== VK_KHR_display_swapchain ===
vkCreateSharedSwapchainsKHR = PFN_vkCreateSharedSwapchainsKHR( vkGetDeviceProcAddr( device, "vkCreateSharedSwapchainsKHR" ) );
//=== VK_EXT_debug_marker ===
vkDebugMarkerSetObjectTagEXT = PFN_vkDebugMarkerSetObjectTagEXT( vkGetDeviceProcAddr( device, "vkDebugMarkerSetObjectTagEXT" ) );
vkDebugMarkerSetObjectNameEXT = PFN_vkDebugMarkerSetObjectNameEXT( vkGetDeviceProcAddr( device, "vkDebugMarkerSetObjectNameEXT" ) );
vkCmdDebugMarkerBeginEXT = PFN_vkCmdDebugMarkerBeginEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerBeginEXT" ) );
vkCmdDebugMarkerEndEXT = PFN_vkCmdDebugMarkerEndEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerEndEXT" ) );
vkCmdDebugMarkerInsertEXT = PFN_vkCmdDebugMarkerInsertEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerInsertEXT" ) );
//=== VK_KHR_video_queue ===
vkCreateVideoSessionKHR = PFN_vkCreateVideoSessionKHR( vkGetDeviceProcAddr( device, "vkCreateVideoSessionKHR" ) );
vkDestroyVideoSessionKHR = PFN_vkDestroyVideoSessionKHR( vkGetDeviceProcAddr( device, "vkDestroyVideoSessionKHR" ) );
vkGetVideoSessionMemoryRequirementsKHR =
PFN_vkGetVideoSessionMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetVideoSessionMemoryRequirementsKHR" ) );
vkBindVideoSessionMemoryKHR = PFN_vkBindVideoSessionMemoryKHR( vkGetDeviceProcAddr( device, "vkBindVideoSessionMemoryKHR" ) );
vkCreateVideoSessionParametersKHR = PFN_vkCreateVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkCreateVideoSessionParametersKHR" ) );
vkUpdateVideoSessionParametersKHR = PFN_vkUpdateVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkUpdateVideoSessionParametersKHR" ) );
vkDestroyVideoSessionParametersKHR = PFN_vkDestroyVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkDestroyVideoSessionParametersKHR" ) );
vkCmdBeginVideoCodingKHR = PFN_vkCmdBeginVideoCodingKHR( vkGetDeviceProcAddr( device, "vkCmdBeginVideoCodingKHR" ) );
vkCmdEndVideoCodingKHR = PFN_vkCmdEndVideoCodingKHR( vkGetDeviceProcAddr( device, "vkCmdEndVideoCodingKHR" ) );
vkCmdControlVideoCodingKHR = PFN_vkCmdControlVideoCodingKHR( vkGetDeviceProcAddr( device, "vkCmdControlVideoCodingKHR" ) );
//=== VK_KHR_video_decode_queue ===
vkCmdDecodeVideoKHR = PFN_vkCmdDecodeVideoKHR( vkGetDeviceProcAddr( device, "vkCmdDecodeVideoKHR" ) );
//=== VK_EXT_transform_feedback ===
vkCmdBindTransformFeedbackBuffersEXT =
PFN_vkCmdBindTransformFeedbackBuffersEXT( vkGetDeviceProcAddr( device, "vkCmdBindTransformFeedbackBuffersEXT" ) );
vkCmdBeginTransformFeedbackEXT = PFN_vkCmdBeginTransformFeedbackEXT( vkGetDeviceProcAddr( device, "vkCmdBeginTransformFeedbackEXT" ) );
vkCmdEndTransformFeedbackEXT = PFN_vkCmdEndTransformFeedbackEXT( vkGetDeviceProcAddr( device, "vkCmdEndTransformFeedbackEXT" ) );
vkCmdBeginQueryIndexedEXT = PFN_vkCmdBeginQueryIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdBeginQueryIndexedEXT" ) );
vkCmdEndQueryIndexedEXT = PFN_vkCmdEndQueryIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdEndQueryIndexedEXT" ) );
vkCmdDrawIndirectByteCountEXT = PFN_vkCmdDrawIndirectByteCountEXT( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectByteCountEXT" ) );
//=== VK_NVX_binary_import ===
vkCreateCuModuleNVX = PFN_vkCreateCuModuleNVX( vkGetDeviceProcAddr( device, "vkCreateCuModuleNVX" ) );
vkCreateCuFunctionNVX = PFN_vkCreateCuFunctionNVX( vkGetDeviceProcAddr( device, "vkCreateCuFunctionNVX" ) );
vkDestroyCuModuleNVX = PFN_vkDestroyCuModuleNVX( vkGetDeviceProcAddr( device, "vkDestroyCuModuleNVX" ) );
vkDestroyCuFunctionNVX = PFN_vkDestroyCuFunctionNVX( vkGetDeviceProcAddr( device, "vkDestroyCuFunctionNVX" ) );
vkCmdCuLaunchKernelNVX = PFN_vkCmdCuLaunchKernelNVX( vkGetDeviceProcAddr( device, "vkCmdCuLaunchKernelNVX" ) );
//=== VK_NVX_image_view_handle ===
vkGetImageViewHandleNVX = PFN_vkGetImageViewHandleNVX( vkGetDeviceProcAddr( device, "vkGetImageViewHandleNVX" ) );
vkGetImageViewAddressNVX = PFN_vkGetImageViewAddressNVX( vkGetDeviceProcAddr( device, "vkGetImageViewAddressNVX" ) );
//=== VK_AMD_draw_indirect_count ===
vkCmdDrawIndirectCountAMD = PFN_vkCmdDrawIndirectCountAMD( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCountAMD" ) );
if ( !vkCmdDrawIndirectCount )
vkCmdDrawIndirectCount = vkCmdDrawIndirectCountAMD;
vkCmdDrawIndexedIndirectCountAMD = PFN_vkCmdDrawIndexedIndirectCountAMD( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCountAMD" ) );
if ( !vkCmdDrawIndexedIndirectCount )
vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountAMD;
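        // Aliasing pattern: when a core-named pointer (here vkCmdDrawIndirectCount / vkCmdDrawIndexedIndirectCount)
        // was not resolved, the identically-typed entry point of the promoting extension is assigned to it, so callers
        // can always dispatch through the core name. Illustrative call (hypothetical handles and arguments):
        //   dispatcher.vkCmdDrawIndirectCount( commandBuffer, buffer, offset, countBuffer, countOffset, maxDrawCount, stride );
        // works whether the driver exposes Vulkan 1.2 or only VK_AMD_draw_indirect_count / VK_KHR_draw_indirect_count.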
//=== VK_AMD_shader_info ===
vkGetShaderInfoAMD = PFN_vkGetShaderInfoAMD( vkGetDeviceProcAddr( device, "vkGetShaderInfoAMD" ) );
//=== VK_KHR_dynamic_rendering ===
vkCmdBeginRenderingKHR = PFN_vkCmdBeginRenderingKHR( vkGetDeviceProcAddr( device, "vkCmdBeginRenderingKHR" ) );
if ( !vkCmdBeginRendering )
vkCmdBeginRendering = vkCmdBeginRenderingKHR;
vkCmdEndRenderingKHR = PFN_vkCmdEndRenderingKHR( vkGetDeviceProcAddr( device, "vkCmdEndRenderingKHR" ) );
if ( !vkCmdEndRendering )
vkCmdEndRendering = vkCmdEndRenderingKHR;
# if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_NV_external_memory_win32 ===
vkGetMemoryWin32HandleNV = PFN_vkGetMemoryWin32HandleNV( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandleNV" ) );
# endif /*VK_USE_PLATFORM_WIN32_KHR*/
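        // Blocks guarded by VK_USE_PLATFORM_* macros (Win32, Android, Metal, Fuchsia, ...) are only compiled when the
        // corresponding platform macro is defined before this header is included; otherwise these loads (and the
        // corresponding dispatcher members) are compiled out.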
//=== VK_KHR_device_group ===
vkGetDeviceGroupPeerMemoryFeaturesKHR =
PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPeerMemoryFeaturesKHR" ) );
if ( !vkGetDeviceGroupPeerMemoryFeatures )
vkGetDeviceGroupPeerMemoryFeatures = vkGetDeviceGroupPeerMemoryFeaturesKHR;
vkCmdSetDeviceMaskKHR = PFN_vkCmdSetDeviceMaskKHR( vkGetDeviceProcAddr( device, "vkCmdSetDeviceMaskKHR" ) );
if ( !vkCmdSetDeviceMask )
vkCmdSetDeviceMask = vkCmdSetDeviceMaskKHR;
vkCmdDispatchBaseKHR = PFN_vkCmdDispatchBaseKHR( vkGetDeviceProcAddr( device, "vkCmdDispatchBaseKHR" ) );
if ( !vkCmdDispatchBase )
vkCmdDispatchBase = vkCmdDispatchBaseKHR;
//=== VK_KHR_maintenance1 ===
vkTrimCommandPoolKHR = PFN_vkTrimCommandPoolKHR( vkGetDeviceProcAddr( device, "vkTrimCommandPoolKHR" ) );
if ( !vkTrimCommandPool )
vkTrimCommandPool = vkTrimCommandPoolKHR;
# if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_KHR_external_memory_win32 ===
vkGetMemoryWin32HandleKHR = PFN_vkGetMemoryWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandleKHR" ) );
vkGetMemoryWin32HandlePropertiesKHR = PFN_vkGetMemoryWin32HandlePropertiesKHR( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandlePropertiesKHR" ) );
# endif /*VK_USE_PLATFORM_WIN32_KHR*/
//=== VK_KHR_external_memory_fd ===
vkGetMemoryFdKHR = PFN_vkGetMemoryFdKHR( vkGetDeviceProcAddr( device, "vkGetMemoryFdKHR" ) );
vkGetMemoryFdPropertiesKHR = PFN_vkGetMemoryFdPropertiesKHR( vkGetDeviceProcAddr( device, "vkGetMemoryFdPropertiesKHR" ) );
# if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_KHR_external_semaphore_win32 ===
vkImportSemaphoreWin32HandleKHR = PFN_vkImportSemaphoreWin32HandleKHR( vkGetDeviceProcAddr( device, "vkImportSemaphoreWin32HandleKHR" ) );
vkGetSemaphoreWin32HandleKHR = PFN_vkGetSemaphoreWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreWin32HandleKHR" ) );
# endif /*VK_USE_PLATFORM_WIN32_KHR*/
//=== VK_KHR_external_semaphore_fd ===
vkImportSemaphoreFdKHR = PFN_vkImportSemaphoreFdKHR( vkGetDeviceProcAddr( device, "vkImportSemaphoreFdKHR" ) );
vkGetSemaphoreFdKHR = PFN_vkGetSemaphoreFdKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreFdKHR" ) );
//=== VK_KHR_push_descriptor ===
vkCmdPushDescriptorSetKHR = PFN_vkCmdPushDescriptorSetKHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetKHR" ) );
vkCmdPushDescriptorSetWithTemplateKHR =
PFN_vkCmdPushDescriptorSetWithTemplateKHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetWithTemplateKHR" ) );
//=== VK_EXT_conditional_rendering ===
vkCmdBeginConditionalRenderingEXT = PFN_vkCmdBeginConditionalRenderingEXT( vkGetDeviceProcAddr( device, "vkCmdBeginConditionalRenderingEXT" ) );
vkCmdEndConditionalRenderingEXT = PFN_vkCmdEndConditionalRenderingEXT( vkGetDeviceProcAddr( device, "vkCmdEndConditionalRenderingEXT" ) );
//=== VK_KHR_descriptor_update_template ===
vkCreateDescriptorUpdateTemplateKHR = PFN_vkCreateDescriptorUpdateTemplateKHR( vkGetDeviceProcAddr( device, "vkCreateDescriptorUpdateTemplateKHR" ) );
if ( !vkCreateDescriptorUpdateTemplate )
vkCreateDescriptorUpdateTemplate = vkCreateDescriptorUpdateTemplateKHR;
vkDestroyDescriptorUpdateTemplateKHR =
PFN_vkDestroyDescriptorUpdateTemplateKHR( vkGetDeviceProcAddr( device, "vkDestroyDescriptorUpdateTemplateKHR" ) );
if ( !vkDestroyDescriptorUpdateTemplate )
vkDestroyDescriptorUpdateTemplate = vkDestroyDescriptorUpdateTemplateKHR;
vkUpdateDescriptorSetWithTemplateKHR =
PFN_vkUpdateDescriptorSetWithTemplateKHR( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSetWithTemplateKHR" ) );
if ( !vkUpdateDescriptorSetWithTemplate )
vkUpdateDescriptorSetWithTemplate = vkUpdateDescriptorSetWithTemplateKHR;
//=== VK_NV_clip_space_w_scaling ===
vkCmdSetViewportWScalingNV = PFN_vkCmdSetViewportWScalingNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportWScalingNV" ) );
//=== VK_EXT_display_control ===
vkDisplayPowerControlEXT = PFN_vkDisplayPowerControlEXT( vkGetDeviceProcAddr( device, "vkDisplayPowerControlEXT" ) );
vkRegisterDeviceEventEXT = PFN_vkRegisterDeviceEventEXT( vkGetDeviceProcAddr( device, "vkRegisterDeviceEventEXT" ) );
vkRegisterDisplayEventEXT = PFN_vkRegisterDisplayEventEXT( vkGetDeviceProcAddr( device, "vkRegisterDisplayEventEXT" ) );
vkGetSwapchainCounterEXT = PFN_vkGetSwapchainCounterEXT( vkGetDeviceProcAddr( device, "vkGetSwapchainCounterEXT" ) );
//=== VK_GOOGLE_display_timing ===
vkGetRefreshCycleDurationGOOGLE = PFN_vkGetRefreshCycleDurationGOOGLE( vkGetDeviceProcAddr( device, "vkGetRefreshCycleDurationGOOGLE" ) );
vkGetPastPresentationTimingGOOGLE = PFN_vkGetPastPresentationTimingGOOGLE( vkGetDeviceProcAddr( device, "vkGetPastPresentationTimingGOOGLE" ) );
//=== VK_EXT_discard_rectangles ===
vkCmdSetDiscardRectangleEXT = PFN_vkCmdSetDiscardRectangleEXT( vkGetDeviceProcAddr( device, "vkCmdSetDiscardRectangleEXT" ) );
vkCmdSetDiscardRectangleEnableEXT = PFN_vkCmdSetDiscardRectangleEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDiscardRectangleEnableEXT" ) );
vkCmdSetDiscardRectangleModeEXT = PFN_vkCmdSetDiscardRectangleModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetDiscardRectangleModeEXT" ) );
//=== VK_EXT_hdr_metadata ===
vkSetHdrMetadataEXT = PFN_vkSetHdrMetadataEXT( vkGetDeviceProcAddr( device, "vkSetHdrMetadataEXT" ) );
//=== VK_KHR_create_renderpass2 ===
vkCreateRenderPass2KHR = PFN_vkCreateRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCreateRenderPass2KHR" ) );
if ( !vkCreateRenderPass2 )
vkCreateRenderPass2 = vkCreateRenderPass2KHR;
vkCmdBeginRenderPass2KHR = PFN_vkCmdBeginRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass2KHR" ) );
if ( !vkCmdBeginRenderPass2 )
vkCmdBeginRenderPass2 = vkCmdBeginRenderPass2KHR;
vkCmdNextSubpass2KHR = PFN_vkCmdNextSubpass2KHR( vkGetDeviceProcAddr( device, "vkCmdNextSubpass2KHR" ) );
if ( !vkCmdNextSubpass2 )
vkCmdNextSubpass2 = vkCmdNextSubpass2KHR;
vkCmdEndRenderPass2KHR = PFN_vkCmdEndRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass2KHR" ) );
if ( !vkCmdEndRenderPass2 )
vkCmdEndRenderPass2 = vkCmdEndRenderPass2KHR;
//=== VK_KHR_shared_presentable_image ===
vkGetSwapchainStatusKHR = PFN_vkGetSwapchainStatusKHR( vkGetDeviceProcAddr( device, "vkGetSwapchainStatusKHR" ) );
# if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_KHR_external_fence_win32 ===
vkImportFenceWin32HandleKHR = PFN_vkImportFenceWin32HandleKHR( vkGetDeviceProcAddr( device, "vkImportFenceWin32HandleKHR" ) );
vkGetFenceWin32HandleKHR = PFN_vkGetFenceWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetFenceWin32HandleKHR" ) );
# endif /*VK_USE_PLATFORM_WIN32_KHR*/
//=== VK_KHR_external_fence_fd ===
vkImportFenceFdKHR = PFN_vkImportFenceFdKHR( vkGetDeviceProcAddr( device, "vkImportFenceFdKHR" ) );
vkGetFenceFdKHR = PFN_vkGetFenceFdKHR( vkGetDeviceProcAddr( device, "vkGetFenceFdKHR" ) );
//=== VK_KHR_performance_query ===
vkAcquireProfilingLockKHR = PFN_vkAcquireProfilingLockKHR( vkGetDeviceProcAddr( device, "vkAcquireProfilingLockKHR" ) );
vkReleaseProfilingLockKHR = PFN_vkReleaseProfilingLockKHR( vkGetDeviceProcAddr( device, "vkReleaseProfilingLockKHR" ) );
//=== VK_EXT_debug_utils ===
vkSetDebugUtilsObjectNameEXT = PFN_vkSetDebugUtilsObjectNameEXT( vkGetDeviceProcAddr( device, "vkSetDebugUtilsObjectNameEXT" ) );
vkSetDebugUtilsObjectTagEXT = PFN_vkSetDebugUtilsObjectTagEXT( vkGetDeviceProcAddr( device, "vkSetDebugUtilsObjectTagEXT" ) );
vkQueueBeginDebugUtilsLabelEXT = PFN_vkQueueBeginDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueBeginDebugUtilsLabelEXT" ) );
vkQueueEndDebugUtilsLabelEXT = PFN_vkQueueEndDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueEndDebugUtilsLabelEXT" ) );
vkQueueInsertDebugUtilsLabelEXT = PFN_vkQueueInsertDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueInsertDebugUtilsLabelEXT" ) );
vkCmdBeginDebugUtilsLabelEXT = PFN_vkCmdBeginDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdBeginDebugUtilsLabelEXT" ) );
vkCmdEndDebugUtilsLabelEXT = PFN_vkCmdEndDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdEndDebugUtilsLabelEXT" ) );
vkCmdInsertDebugUtilsLabelEXT = PFN_vkCmdInsertDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdInsertDebugUtilsLabelEXT" ) );
# if defined( VK_USE_PLATFORM_ANDROID_KHR )
//=== VK_ANDROID_external_memory_android_hardware_buffer ===
vkGetAndroidHardwareBufferPropertiesANDROID =
PFN_vkGetAndroidHardwareBufferPropertiesANDROID( vkGetDeviceProcAddr( device, "vkGetAndroidHardwareBufferPropertiesANDROID" ) );
vkGetMemoryAndroidHardwareBufferANDROID =
PFN_vkGetMemoryAndroidHardwareBufferANDROID( vkGetDeviceProcAddr( device, "vkGetMemoryAndroidHardwareBufferANDROID" ) );
# endif /*VK_USE_PLATFORM_ANDROID_KHR*/
# if defined( VK_ENABLE_BETA_EXTENSIONS )
//=== VK_AMDX_shader_enqueue ===
vkCreateExecutionGraphPipelinesAMDX = PFN_vkCreateExecutionGraphPipelinesAMDX( vkGetDeviceProcAddr( device, "vkCreateExecutionGraphPipelinesAMDX" ) );
vkGetExecutionGraphPipelineScratchSizeAMDX =
PFN_vkGetExecutionGraphPipelineScratchSizeAMDX( vkGetDeviceProcAddr( device, "vkGetExecutionGraphPipelineScratchSizeAMDX" ) );
vkGetExecutionGraphPipelineNodeIndexAMDX =
PFN_vkGetExecutionGraphPipelineNodeIndexAMDX( vkGetDeviceProcAddr( device, "vkGetExecutionGraphPipelineNodeIndexAMDX" ) );
vkCmdInitializeGraphScratchMemoryAMDX =
PFN_vkCmdInitializeGraphScratchMemoryAMDX( vkGetDeviceProcAddr( device, "vkCmdInitializeGraphScratchMemoryAMDX" ) );
vkCmdDispatchGraphAMDX = PFN_vkCmdDispatchGraphAMDX( vkGetDeviceProcAddr( device, "vkCmdDispatchGraphAMDX" ) );
vkCmdDispatchGraphIndirectAMDX = PFN_vkCmdDispatchGraphIndirectAMDX( vkGetDeviceProcAddr( device, "vkCmdDispatchGraphIndirectAMDX" ) );
vkCmdDispatchGraphIndirectCountAMDX = PFN_vkCmdDispatchGraphIndirectCountAMDX( vkGetDeviceProcAddr( device, "vkCmdDispatchGraphIndirectCountAMDX" ) );
# endif /*VK_ENABLE_BETA_EXTENSIONS*/
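        // VK_ENABLE_BETA_EXTENSIONS gates provisional extensions such as VK_AMDX_shader_enqueue, which may change
        // incompatibly between header releases and therefore must be enabled explicitly at compile time.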
//=== VK_EXT_sample_locations ===
vkCmdSetSampleLocationsEXT = PFN_vkCmdSetSampleLocationsEXT( vkGetDeviceProcAddr( device, "vkCmdSetSampleLocationsEXT" ) );
//=== VK_KHR_get_memory_requirements2 ===
vkGetImageMemoryRequirements2KHR = PFN_vkGetImageMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements2KHR" ) );
if ( !vkGetImageMemoryRequirements2 )
vkGetImageMemoryRequirements2 = vkGetImageMemoryRequirements2KHR;
vkGetBufferMemoryRequirements2KHR = PFN_vkGetBufferMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements2KHR" ) );
if ( !vkGetBufferMemoryRequirements2 )
vkGetBufferMemoryRequirements2 = vkGetBufferMemoryRequirements2KHR;
vkGetImageSparseMemoryRequirements2KHR =
PFN_vkGetImageSparseMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements2KHR" ) );
if ( !vkGetImageSparseMemoryRequirements2 )
vkGetImageSparseMemoryRequirements2 = vkGetImageSparseMemoryRequirements2KHR;
//=== VK_KHR_acceleration_structure ===
vkCreateAccelerationStructureKHR = PFN_vkCreateAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCreateAccelerationStructureKHR" ) );
vkDestroyAccelerationStructureKHR = PFN_vkDestroyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkDestroyAccelerationStructureKHR" ) );
vkCmdBuildAccelerationStructuresKHR = PFN_vkCmdBuildAccelerationStructuresKHR( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructuresKHR" ) );
vkCmdBuildAccelerationStructuresIndirectKHR =
PFN_vkCmdBuildAccelerationStructuresIndirectKHR( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructuresIndirectKHR" ) );
vkBuildAccelerationStructuresKHR = PFN_vkBuildAccelerationStructuresKHR( vkGetDeviceProcAddr( device, "vkBuildAccelerationStructuresKHR" ) );
vkCopyAccelerationStructureKHR = PFN_vkCopyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCopyAccelerationStructureKHR" ) );
vkCopyAccelerationStructureToMemoryKHR =
PFN_vkCopyAccelerationStructureToMemoryKHR( vkGetDeviceProcAddr( device, "vkCopyAccelerationStructureToMemoryKHR" ) );
vkCopyMemoryToAccelerationStructureKHR =
PFN_vkCopyMemoryToAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCopyMemoryToAccelerationStructureKHR" ) );
vkWriteAccelerationStructuresPropertiesKHR =
PFN_vkWriteAccelerationStructuresPropertiesKHR( vkGetDeviceProcAddr( device, "vkWriteAccelerationStructuresPropertiesKHR" ) );
vkCmdCopyAccelerationStructureKHR = PFN_vkCmdCopyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureKHR" ) );
vkCmdCopyAccelerationStructureToMemoryKHR =
PFN_vkCmdCopyAccelerationStructureToMemoryKHR( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureToMemoryKHR" ) );
vkCmdCopyMemoryToAccelerationStructureKHR =
PFN_vkCmdCopyMemoryToAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryToAccelerationStructureKHR" ) );
vkGetAccelerationStructureDeviceAddressKHR =
PFN_vkGetAccelerationStructureDeviceAddressKHR( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureDeviceAddressKHR" ) );
vkCmdWriteAccelerationStructuresPropertiesKHR =
PFN_vkCmdWriteAccelerationStructuresPropertiesKHR( vkGetDeviceProcAddr( device, "vkCmdWriteAccelerationStructuresPropertiesKHR" ) );
vkGetDeviceAccelerationStructureCompatibilityKHR =
PFN_vkGetDeviceAccelerationStructureCompatibilityKHR( vkGetDeviceProcAddr( device, "vkGetDeviceAccelerationStructureCompatibilityKHR" ) );
vkGetAccelerationStructureBuildSizesKHR =
PFN_vkGetAccelerationStructureBuildSizesKHR( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureBuildSizesKHR" ) );
//=== VK_KHR_ray_tracing_pipeline ===
vkCmdTraceRaysKHR = PFN_vkCmdTraceRaysKHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysKHR" ) );
vkCreateRayTracingPipelinesKHR = PFN_vkCreateRayTracingPipelinesKHR( vkGetDeviceProcAddr( device, "vkCreateRayTracingPipelinesKHR" ) );
vkGetRayTracingShaderGroupHandlesKHR =
PFN_vkGetRayTracingShaderGroupHandlesKHR( vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupHandlesKHR" ) );
vkGetRayTracingCaptureReplayShaderGroupHandlesKHR =
PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( vkGetDeviceProcAddr( device, "vkGetRayTracingCaptureReplayShaderGroupHandlesKHR" ) );
vkCmdTraceRaysIndirectKHR = PFN_vkCmdTraceRaysIndirectKHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysIndirectKHR" ) );
vkGetRayTracingShaderGroupStackSizeKHR =
PFN_vkGetRayTracingShaderGroupStackSizeKHR( vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupStackSizeKHR" ) );
vkCmdSetRayTracingPipelineStackSizeKHR =
PFN_vkCmdSetRayTracingPipelineStackSizeKHR( vkGetDeviceProcAddr( device, "vkCmdSetRayTracingPipelineStackSizeKHR" ) );
//=== VK_KHR_sampler_ycbcr_conversion ===
vkCreateSamplerYcbcrConversionKHR = PFN_vkCreateSamplerYcbcrConversionKHR( vkGetDeviceProcAddr( device, "vkCreateSamplerYcbcrConversionKHR" ) );
if ( !vkCreateSamplerYcbcrConversion )
vkCreateSamplerYcbcrConversion = vkCreateSamplerYcbcrConversionKHR;
vkDestroySamplerYcbcrConversionKHR = PFN_vkDestroySamplerYcbcrConversionKHR( vkGetDeviceProcAddr( device, "vkDestroySamplerYcbcrConversionKHR" ) );
if ( !vkDestroySamplerYcbcrConversion )
vkDestroySamplerYcbcrConversion = vkDestroySamplerYcbcrConversionKHR;
//=== VK_KHR_bind_memory2 ===
vkBindBufferMemory2KHR = PFN_vkBindBufferMemory2KHR( vkGetDeviceProcAddr( device, "vkBindBufferMemory2KHR" ) );
if ( !vkBindBufferMemory2 )
vkBindBufferMemory2 = vkBindBufferMemory2KHR;
vkBindImageMemory2KHR = PFN_vkBindImageMemory2KHR( vkGetDeviceProcAddr( device, "vkBindImageMemory2KHR" ) );
if ( !vkBindImageMemory2 )
vkBindImageMemory2 = vkBindImageMemory2KHR;
//=== VK_EXT_image_drm_format_modifier ===
vkGetImageDrmFormatModifierPropertiesEXT =
PFN_vkGetImageDrmFormatModifierPropertiesEXT( vkGetDeviceProcAddr( device, "vkGetImageDrmFormatModifierPropertiesEXT" ) );
//=== VK_EXT_validation_cache ===
vkCreateValidationCacheEXT = PFN_vkCreateValidationCacheEXT( vkGetDeviceProcAddr( device, "vkCreateValidationCacheEXT" ) );
vkDestroyValidationCacheEXT = PFN_vkDestroyValidationCacheEXT( vkGetDeviceProcAddr( device, "vkDestroyValidationCacheEXT" ) );
vkMergeValidationCachesEXT = PFN_vkMergeValidationCachesEXT( vkGetDeviceProcAddr( device, "vkMergeValidationCachesEXT" ) );
vkGetValidationCacheDataEXT = PFN_vkGetValidationCacheDataEXT( vkGetDeviceProcAddr( device, "vkGetValidationCacheDataEXT" ) );
//=== VK_NV_shading_rate_image ===
vkCmdBindShadingRateImageNV = PFN_vkCmdBindShadingRateImageNV( vkGetDeviceProcAddr( device, "vkCmdBindShadingRateImageNV" ) );
vkCmdSetViewportShadingRatePaletteNV =
PFN_vkCmdSetViewportShadingRatePaletteNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportShadingRatePaletteNV" ) );
vkCmdSetCoarseSampleOrderNV = PFN_vkCmdSetCoarseSampleOrderNV( vkGetDeviceProcAddr( device, "vkCmdSetCoarseSampleOrderNV" ) );
//=== VK_NV_ray_tracing ===
vkCreateAccelerationStructureNV = PFN_vkCreateAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCreateAccelerationStructureNV" ) );
vkDestroyAccelerationStructureNV = PFN_vkDestroyAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkDestroyAccelerationStructureNV" ) );
vkGetAccelerationStructureMemoryRequirementsNV =
PFN_vkGetAccelerationStructureMemoryRequirementsNV( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureMemoryRequirementsNV" ) );
vkBindAccelerationStructureMemoryNV = PFN_vkBindAccelerationStructureMemoryNV( vkGetDeviceProcAddr( device, "vkBindAccelerationStructureMemoryNV" ) );
vkCmdBuildAccelerationStructureNV = PFN_vkCmdBuildAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructureNV" ) );
vkCmdCopyAccelerationStructureNV = PFN_vkCmdCopyAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureNV" ) );
vkCmdTraceRaysNV = PFN_vkCmdTraceRaysNV( vkGetDeviceProcAddr( device, "vkCmdTraceRaysNV" ) );
vkCreateRayTracingPipelinesNV = PFN_vkCreateRayTracingPipelinesNV( vkGetDeviceProcAddr( device, "vkCreateRayTracingPipelinesNV" ) );
vkGetRayTracingShaderGroupHandlesNV = PFN_vkGetRayTracingShaderGroupHandlesNV( vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupHandlesNV" ) );
if ( !vkGetRayTracingShaderGroupHandlesKHR )
vkGetRayTracingShaderGroupHandlesKHR = vkGetRayTracingShaderGroupHandlesNV;
vkGetAccelerationStructureHandleNV = PFN_vkGetAccelerationStructureHandleNV( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureHandleNV" ) );
vkCmdWriteAccelerationStructuresPropertiesNV =
PFN_vkCmdWriteAccelerationStructuresPropertiesNV( vkGetDeviceProcAddr( device, "vkCmdWriteAccelerationStructuresPropertiesNV" ) );
vkCompileDeferredNV = PFN_vkCompileDeferredNV( vkGetDeviceProcAddr( device, "vkCompileDeferredNV" ) );
//=== VK_KHR_maintenance3 ===
vkGetDescriptorSetLayoutSupportKHR = PFN_vkGetDescriptorSetLayoutSupportKHR( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSupportKHR" ) );
if ( !vkGetDescriptorSetLayoutSupport )
vkGetDescriptorSetLayoutSupport = vkGetDescriptorSetLayoutSupportKHR;
//=== VK_KHR_draw_indirect_count ===
vkCmdDrawIndirectCountKHR = PFN_vkCmdDrawIndirectCountKHR( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCountKHR" ) );
if ( !vkCmdDrawIndirectCount )
vkCmdDrawIndirectCount = vkCmdDrawIndirectCountKHR;
vkCmdDrawIndexedIndirectCountKHR = PFN_vkCmdDrawIndexedIndirectCountKHR( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCountKHR" ) );
if ( !vkCmdDrawIndexedIndirectCount )
vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountKHR;
//=== VK_EXT_external_memory_host ===
vkGetMemoryHostPointerPropertiesEXT = PFN_vkGetMemoryHostPointerPropertiesEXT( vkGetDeviceProcAddr( device, "vkGetMemoryHostPointerPropertiesEXT" ) );
//=== VK_AMD_buffer_marker ===
vkCmdWriteBufferMarkerAMD = PFN_vkCmdWriteBufferMarkerAMD( vkGetDeviceProcAddr( device, "vkCmdWriteBufferMarkerAMD" ) );
vkCmdWriteBufferMarker2AMD = PFN_vkCmdWriteBufferMarker2AMD( vkGetDeviceProcAddr( device, "vkCmdWriteBufferMarker2AMD" ) );
//=== VK_EXT_calibrated_timestamps ===
vkGetCalibratedTimestampsEXT = PFN_vkGetCalibratedTimestampsEXT( vkGetDeviceProcAddr( device, "vkGetCalibratedTimestampsEXT" ) );
if ( !vkGetCalibratedTimestampsKHR )
vkGetCalibratedTimestampsKHR = vkGetCalibratedTimestampsEXT;
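        // Here the backfill goes from EXT to KHR rather than to core: VK_EXT_calibrated_timestamps was promoted to
        // VK_KHR_calibrated_timestamps, so the EXT entry point also populates vkGetCalibratedTimestampsKHR when only
        // the EXT extension is present.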
//=== VK_NV_mesh_shader ===
vkCmdDrawMeshTasksNV = PFN_vkCmdDrawMeshTasksNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksNV" ) );
vkCmdDrawMeshTasksIndirectNV = PFN_vkCmdDrawMeshTasksIndirectNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectNV" ) );
vkCmdDrawMeshTasksIndirectCountNV = PFN_vkCmdDrawMeshTasksIndirectCountNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectCountNV" ) );
//=== VK_NV_scissor_exclusive ===
vkCmdSetExclusiveScissorEnableNV = PFN_vkCmdSetExclusiveScissorEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetExclusiveScissorEnableNV" ) );
vkCmdSetExclusiveScissorNV = PFN_vkCmdSetExclusiveScissorNV( vkGetDeviceProcAddr( device, "vkCmdSetExclusiveScissorNV" ) );
//=== VK_NV_device_diagnostic_checkpoints ===
vkCmdSetCheckpointNV = PFN_vkCmdSetCheckpointNV( vkGetDeviceProcAddr( device, "vkCmdSetCheckpointNV" ) );
vkGetQueueCheckpointDataNV = PFN_vkGetQueueCheckpointDataNV( vkGetDeviceProcAddr( device, "vkGetQueueCheckpointDataNV" ) );
vkGetQueueCheckpointData2NV = PFN_vkGetQueueCheckpointData2NV( vkGetDeviceProcAddr( device, "vkGetQueueCheckpointData2NV" ) );
//=== VK_KHR_timeline_semaphore ===
vkGetSemaphoreCounterValueKHR = PFN_vkGetSemaphoreCounterValueKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreCounterValueKHR" ) );
if ( !vkGetSemaphoreCounterValue )
vkGetSemaphoreCounterValue = vkGetSemaphoreCounterValueKHR;
vkWaitSemaphoresKHR = PFN_vkWaitSemaphoresKHR( vkGetDeviceProcAddr( device, "vkWaitSemaphoresKHR" ) );
if ( !vkWaitSemaphores )
vkWaitSemaphores = vkWaitSemaphoresKHR;
vkSignalSemaphoreKHR = PFN_vkSignalSemaphoreKHR( vkGetDeviceProcAddr( device, "vkSignalSemaphoreKHR" ) );
if ( !vkSignalSemaphore )
vkSignalSemaphore = vkSignalSemaphoreKHR;
//=== VK_INTEL_performance_query ===
vkInitializePerformanceApiINTEL = PFN_vkInitializePerformanceApiINTEL( vkGetDeviceProcAddr( device, "vkInitializePerformanceApiINTEL" ) );
vkUninitializePerformanceApiINTEL = PFN_vkUninitializePerformanceApiINTEL( vkGetDeviceProcAddr( device, "vkUninitializePerformanceApiINTEL" ) );
vkCmdSetPerformanceMarkerINTEL = PFN_vkCmdSetPerformanceMarkerINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceMarkerINTEL" ) );
vkCmdSetPerformanceStreamMarkerINTEL =
PFN_vkCmdSetPerformanceStreamMarkerINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceStreamMarkerINTEL" ) );
vkCmdSetPerformanceOverrideINTEL = PFN_vkCmdSetPerformanceOverrideINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceOverrideINTEL" ) );
vkAcquirePerformanceConfigurationINTEL =
PFN_vkAcquirePerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkAcquirePerformanceConfigurationINTEL" ) );
vkReleasePerformanceConfigurationINTEL =
PFN_vkReleasePerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkReleasePerformanceConfigurationINTEL" ) );
vkQueueSetPerformanceConfigurationINTEL =
PFN_vkQueueSetPerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkQueueSetPerformanceConfigurationINTEL" ) );
vkGetPerformanceParameterINTEL = PFN_vkGetPerformanceParameterINTEL( vkGetDeviceProcAddr( device, "vkGetPerformanceParameterINTEL" ) );
//=== VK_AMD_display_native_hdr ===
vkSetLocalDimmingAMD = PFN_vkSetLocalDimmingAMD( vkGetDeviceProcAddr( device, "vkSetLocalDimmingAMD" ) );
//=== VK_KHR_fragment_shading_rate ===
vkCmdSetFragmentShadingRateKHR = PFN_vkCmdSetFragmentShadingRateKHR( vkGetDeviceProcAddr( device, "vkCmdSetFragmentShadingRateKHR" ) );
//=== VK_KHR_dynamic_rendering_local_read ===
vkCmdSetRenderingAttachmentLocationsKHR =
PFN_vkCmdSetRenderingAttachmentLocationsKHR( vkGetDeviceProcAddr( device, "vkCmdSetRenderingAttachmentLocationsKHR" ) );
vkCmdSetRenderingInputAttachmentIndicesKHR =
PFN_vkCmdSetRenderingInputAttachmentIndicesKHR( vkGetDeviceProcAddr( device, "vkCmdSetRenderingInputAttachmentIndicesKHR" ) );
//=== VK_EXT_buffer_device_address ===
vkGetBufferDeviceAddressEXT = PFN_vkGetBufferDeviceAddressEXT( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddressEXT" ) );
if ( !vkGetBufferDeviceAddress )
vkGetBufferDeviceAddress = vkGetBufferDeviceAddressEXT;
//=== VK_KHR_present_wait ===
vkWaitForPresentKHR = PFN_vkWaitForPresentKHR( vkGetDeviceProcAddr( device, "vkWaitForPresentKHR" ) );
# if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_EXT_full_screen_exclusive ===
vkAcquireFullScreenExclusiveModeEXT = PFN_vkAcquireFullScreenExclusiveModeEXT( vkGetDeviceProcAddr( device, "vkAcquireFullScreenExclusiveModeEXT" ) );
vkReleaseFullScreenExclusiveModeEXT = PFN_vkReleaseFullScreenExclusiveModeEXT( vkGetDeviceProcAddr( device, "vkReleaseFullScreenExclusiveModeEXT" ) );
vkGetDeviceGroupSurfacePresentModes2EXT =
PFN_vkGetDeviceGroupSurfacePresentModes2EXT( vkGetDeviceProcAddr( device, "vkGetDeviceGroupSurfacePresentModes2EXT" ) );
# endif /*VK_USE_PLATFORM_WIN32_KHR*/
//=== VK_KHR_buffer_device_address ===
vkGetBufferDeviceAddressKHR = PFN_vkGetBufferDeviceAddressKHR( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddressKHR" ) );
if ( !vkGetBufferDeviceAddress )
vkGetBufferDeviceAddress = vkGetBufferDeviceAddressKHR;
vkGetBufferOpaqueCaptureAddressKHR = PFN_vkGetBufferOpaqueCaptureAddressKHR( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureAddressKHR" ) );
if ( !vkGetBufferOpaqueCaptureAddress )
vkGetBufferOpaqueCaptureAddress = vkGetBufferOpaqueCaptureAddressKHR;
vkGetDeviceMemoryOpaqueCaptureAddressKHR =
PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryOpaqueCaptureAddressKHR" ) );
if ( !vkGetDeviceMemoryOpaqueCaptureAddress )
vkGetDeviceMemoryOpaqueCaptureAddress = vkGetDeviceMemoryOpaqueCaptureAddressKHR;
//=== VK_EXT_line_rasterization ===
vkCmdSetLineStippleEXT = PFN_vkCmdSetLineStippleEXT( vkGetDeviceProcAddr( device, "vkCmdSetLineStippleEXT" ) );
if ( !vkCmdSetLineStippleKHR )
vkCmdSetLineStippleKHR = vkCmdSetLineStippleEXT;
//=== VK_EXT_host_query_reset ===
vkResetQueryPoolEXT = PFN_vkResetQueryPoolEXT( vkGetDeviceProcAddr( device, "vkResetQueryPoolEXT" ) );
if ( !vkResetQueryPool )
vkResetQueryPool = vkResetQueryPoolEXT;
//=== VK_EXT_extended_dynamic_state ===
vkCmdSetCullModeEXT = PFN_vkCmdSetCullModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetCullModeEXT" ) );
if ( !vkCmdSetCullMode )
vkCmdSetCullMode = vkCmdSetCullModeEXT;
vkCmdSetFrontFaceEXT = PFN_vkCmdSetFrontFaceEXT( vkGetDeviceProcAddr( device, "vkCmdSetFrontFaceEXT" ) );
if ( !vkCmdSetFrontFace )
vkCmdSetFrontFace = vkCmdSetFrontFaceEXT;
vkCmdSetPrimitiveTopologyEXT = PFN_vkCmdSetPrimitiveTopologyEXT( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveTopologyEXT" ) );
if ( !vkCmdSetPrimitiveTopology )
vkCmdSetPrimitiveTopology = vkCmdSetPrimitiveTopologyEXT;
vkCmdSetViewportWithCountEXT = PFN_vkCmdSetViewportWithCountEXT( vkGetDeviceProcAddr( device, "vkCmdSetViewportWithCountEXT" ) );
if ( !vkCmdSetViewportWithCount )
vkCmdSetViewportWithCount = vkCmdSetViewportWithCountEXT;
vkCmdSetScissorWithCountEXT = PFN_vkCmdSetScissorWithCountEXT( vkGetDeviceProcAddr( device, "vkCmdSetScissorWithCountEXT" ) );
if ( !vkCmdSetScissorWithCount )
vkCmdSetScissorWithCount = vkCmdSetScissorWithCountEXT;
vkCmdBindVertexBuffers2EXT = PFN_vkCmdBindVertexBuffers2EXT( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers2EXT" ) );
if ( !vkCmdBindVertexBuffers2 )
vkCmdBindVertexBuffers2 = vkCmdBindVertexBuffers2EXT;
vkCmdSetDepthTestEnableEXT = PFN_vkCmdSetDepthTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthTestEnableEXT" ) );
if ( !vkCmdSetDepthTestEnable )
vkCmdSetDepthTestEnable = vkCmdSetDepthTestEnableEXT;
vkCmdSetDepthWriteEnableEXT = PFN_vkCmdSetDepthWriteEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthWriteEnableEXT" ) );
if ( !vkCmdSetDepthWriteEnable )
vkCmdSetDepthWriteEnable = vkCmdSetDepthWriteEnableEXT;
vkCmdSetDepthCompareOpEXT = PFN_vkCmdSetDepthCompareOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthCompareOpEXT" ) );
if ( !vkCmdSetDepthCompareOp )
vkCmdSetDepthCompareOp = vkCmdSetDepthCompareOpEXT;
vkCmdSetDepthBoundsTestEnableEXT = PFN_vkCmdSetDepthBoundsTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthBoundsTestEnableEXT" ) );
if ( !vkCmdSetDepthBoundsTestEnable )
vkCmdSetDepthBoundsTestEnable = vkCmdSetDepthBoundsTestEnableEXT;
vkCmdSetStencilTestEnableEXT = PFN_vkCmdSetStencilTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetStencilTestEnableEXT" ) );
if ( !vkCmdSetStencilTestEnable )
vkCmdSetStencilTestEnable = vkCmdSetStencilTestEnableEXT;
vkCmdSetStencilOpEXT = PFN_vkCmdSetStencilOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetStencilOpEXT" ) );
if ( !vkCmdSetStencilOp )
vkCmdSetStencilOp = vkCmdSetStencilOpEXT;
//=== VK_KHR_deferred_host_operations ===
vkCreateDeferredOperationKHR = PFN_vkCreateDeferredOperationKHR( vkGetDeviceProcAddr( device, "vkCreateDeferredOperationKHR" ) );
vkDestroyDeferredOperationKHR = PFN_vkDestroyDeferredOperationKHR( vkGetDeviceProcAddr( device, "vkDestroyDeferredOperationKHR" ) );
vkGetDeferredOperationMaxConcurrencyKHR =
PFN_vkGetDeferredOperationMaxConcurrencyKHR( vkGetDeviceProcAddr( device, "vkGetDeferredOperationMaxConcurrencyKHR" ) );
vkGetDeferredOperationResultKHR = PFN_vkGetDeferredOperationResultKHR( vkGetDeviceProcAddr( device, "vkGetDeferredOperationResultKHR" ) );
vkDeferredOperationJoinKHR = PFN_vkDeferredOperationJoinKHR( vkGetDeviceProcAddr( device, "vkDeferredOperationJoinKHR" ) );
//=== VK_KHR_pipeline_executable_properties ===
vkGetPipelineExecutablePropertiesKHR =
PFN_vkGetPipelineExecutablePropertiesKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutablePropertiesKHR" ) );
vkGetPipelineExecutableStatisticsKHR =
PFN_vkGetPipelineExecutableStatisticsKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutableStatisticsKHR" ) );
vkGetPipelineExecutableInternalRepresentationsKHR =
PFN_vkGetPipelineExecutableInternalRepresentationsKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutableInternalRepresentationsKHR" ) );
//=== VK_EXT_host_image_copy ===
vkCopyMemoryToImageEXT = PFN_vkCopyMemoryToImageEXT( vkGetDeviceProcAddr( device, "vkCopyMemoryToImageEXT" ) );
vkCopyImageToMemoryEXT = PFN_vkCopyImageToMemoryEXT( vkGetDeviceProcAddr( device, "vkCopyImageToMemoryEXT" ) );
vkCopyImageToImageEXT = PFN_vkCopyImageToImageEXT( vkGetDeviceProcAddr( device, "vkCopyImageToImageEXT" ) );
vkTransitionImageLayoutEXT = PFN_vkTransitionImageLayoutEXT( vkGetDeviceProcAddr( device, "vkTransitionImageLayoutEXT" ) );
vkGetImageSubresourceLayout2EXT = PFN_vkGetImageSubresourceLayout2EXT( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout2EXT" ) );
if ( !vkGetImageSubresourceLayout2KHR )
vkGetImageSubresourceLayout2KHR = vkGetImageSubresourceLayout2EXT;
//=== VK_KHR_map_memory2 ===
vkMapMemory2KHR = PFN_vkMapMemory2KHR( vkGetDeviceProcAddr( device, "vkMapMemory2KHR" ) );
vkUnmapMemory2KHR = PFN_vkUnmapMemory2KHR( vkGetDeviceProcAddr( device, "vkUnmapMemory2KHR" ) );
//=== VK_EXT_swapchain_maintenance1 ===
vkReleaseSwapchainImagesEXT = PFN_vkReleaseSwapchainImagesEXT( vkGetDeviceProcAddr( device, "vkReleaseSwapchainImagesEXT" ) );
//=== VK_NV_device_generated_commands ===
vkGetGeneratedCommandsMemoryRequirementsNV =
PFN_vkGetGeneratedCommandsMemoryRequirementsNV( vkGetDeviceProcAddr( device, "vkGetGeneratedCommandsMemoryRequirementsNV" ) );
vkCmdPreprocessGeneratedCommandsNV = PFN_vkCmdPreprocessGeneratedCommandsNV( vkGetDeviceProcAddr( device, "vkCmdPreprocessGeneratedCommandsNV" ) );
vkCmdExecuteGeneratedCommandsNV = PFN_vkCmdExecuteGeneratedCommandsNV( vkGetDeviceProcAddr( device, "vkCmdExecuteGeneratedCommandsNV" ) );
vkCmdBindPipelineShaderGroupNV = PFN_vkCmdBindPipelineShaderGroupNV( vkGetDeviceProcAddr( device, "vkCmdBindPipelineShaderGroupNV" ) );
vkCreateIndirectCommandsLayoutNV = PFN_vkCreateIndirectCommandsLayoutNV( vkGetDeviceProcAddr( device, "vkCreateIndirectCommandsLayoutNV" ) );
vkDestroyIndirectCommandsLayoutNV = PFN_vkDestroyIndirectCommandsLayoutNV( vkGetDeviceProcAddr( device, "vkDestroyIndirectCommandsLayoutNV" ) );
//=== VK_EXT_depth_bias_control ===
vkCmdSetDepthBias2EXT = PFN_vkCmdSetDepthBias2EXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthBias2EXT" ) );
//=== VK_EXT_private_data ===
vkCreatePrivateDataSlotEXT = PFN_vkCreatePrivateDataSlotEXT( vkGetDeviceProcAddr( device, "vkCreatePrivateDataSlotEXT" ) );
if ( !vkCreatePrivateDataSlot )
vkCreatePrivateDataSlot = vkCreatePrivateDataSlotEXT;
vkDestroyPrivateDataSlotEXT = PFN_vkDestroyPrivateDataSlotEXT( vkGetDeviceProcAddr( device, "vkDestroyPrivateDataSlotEXT" ) );
if ( !vkDestroyPrivateDataSlot )
vkDestroyPrivateDataSlot = vkDestroyPrivateDataSlotEXT;
vkSetPrivateDataEXT = PFN_vkSetPrivateDataEXT( vkGetDeviceProcAddr( device, "vkSetPrivateDataEXT" ) );
if ( !vkSetPrivateData )
vkSetPrivateData = vkSetPrivateDataEXT;
vkGetPrivateDataEXT = PFN_vkGetPrivateDataEXT( vkGetDeviceProcAddr( device, "vkGetPrivateDataEXT" ) );
if ( !vkGetPrivateData )
vkGetPrivateData = vkGetPrivateDataEXT;
//=== VK_KHR_video_encode_queue ===
vkGetEncodedVideoSessionParametersKHR =
PFN_vkGetEncodedVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkGetEncodedVideoSessionParametersKHR" ) );
vkCmdEncodeVideoKHR = PFN_vkCmdEncodeVideoKHR( vkGetDeviceProcAddr( device, "vkCmdEncodeVideoKHR" ) );
# if defined( VK_ENABLE_BETA_EXTENSIONS )
//=== VK_NV_cuda_kernel_launch ===
vkCreateCudaModuleNV = PFN_vkCreateCudaModuleNV( vkGetDeviceProcAddr( device, "vkCreateCudaModuleNV" ) );
vkGetCudaModuleCacheNV = PFN_vkGetCudaModuleCacheNV( vkGetDeviceProcAddr( device, "vkGetCudaModuleCacheNV" ) );
vkCreateCudaFunctionNV = PFN_vkCreateCudaFunctionNV( vkGetDeviceProcAddr( device, "vkCreateCudaFunctionNV" ) );
vkDestroyCudaModuleNV = PFN_vkDestroyCudaModuleNV( vkGetDeviceProcAddr( device, "vkDestroyCudaModuleNV" ) );
vkDestroyCudaFunctionNV = PFN_vkDestroyCudaFunctionNV( vkGetDeviceProcAddr( device, "vkDestroyCudaFunctionNV" ) );
vkCmdCudaLaunchKernelNV = PFN_vkCmdCudaLaunchKernelNV( vkGetDeviceProcAddr( device, "vkCmdCudaLaunchKernelNV" ) );
# endif /*VK_ENABLE_BETA_EXTENSIONS*/
# if defined( VK_USE_PLATFORM_METAL_EXT )
//=== VK_EXT_metal_objects ===
vkExportMetalObjectsEXT = PFN_vkExportMetalObjectsEXT( vkGetDeviceProcAddr( device, "vkExportMetalObjectsEXT" ) );
# endif /*VK_USE_PLATFORM_METAL_EXT*/
//=== VK_KHR_synchronization2 ===
vkCmdSetEvent2KHR = PFN_vkCmdSetEvent2KHR( vkGetDeviceProcAddr( device, "vkCmdSetEvent2KHR" ) );
if ( !vkCmdSetEvent2 )
vkCmdSetEvent2 = vkCmdSetEvent2KHR;
vkCmdResetEvent2KHR = PFN_vkCmdResetEvent2KHR( vkGetDeviceProcAddr( device, "vkCmdResetEvent2KHR" ) );
if ( !vkCmdResetEvent2 )
vkCmdResetEvent2 = vkCmdResetEvent2KHR;
vkCmdWaitEvents2KHR = PFN_vkCmdWaitEvents2KHR( vkGetDeviceProcAddr( device, "vkCmdWaitEvents2KHR" ) );
if ( !vkCmdWaitEvents2 )
vkCmdWaitEvents2 = vkCmdWaitEvents2KHR;
vkCmdPipelineBarrier2KHR = PFN_vkCmdPipelineBarrier2KHR( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier2KHR" ) );
if ( !vkCmdPipelineBarrier2 )
vkCmdPipelineBarrier2 = vkCmdPipelineBarrier2KHR;
vkCmdWriteTimestamp2KHR = PFN_vkCmdWriteTimestamp2KHR( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp2KHR" ) );
if ( !vkCmdWriteTimestamp2 )
vkCmdWriteTimestamp2 = vkCmdWriteTimestamp2KHR;
vkQueueSubmit2KHR = PFN_vkQueueSubmit2KHR( vkGetDeviceProcAddr( device, "vkQueueSubmit2KHR" ) );
if ( !vkQueueSubmit2 )
vkQueueSubmit2 = vkQueueSubmit2KHR;
//=== VK_EXT_descriptor_buffer ===
vkGetDescriptorSetLayoutSizeEXT = PFN_vkGetDescriptorSetLayoutSizeEXT( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSizeEXT" ) );
vkGetDescriptorSetLayoutBindingOffsetEXT =
PFN_vkGetDescriptorSetLayoutBindingOffsetEXT( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutBindingOffsetEXT" ) );
vkGetDescriptorEXT = PFN_vkGetDescriptorEXT( vkGetDeviceProcAddr( device, "vkGetDescriptorEXT" ) );
vkCmdBindDescriptorBuffersEXT = PFN_vkCmdBindDescriptorBuffersEXT( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorBuffersEXT" ) );
vkCmdSetDescriptorBufferOffsetsEXT = PFN_vkCmdSetDescriptorBufferOffsetsEXT( vkGetDeviceProcAddr( device, "vkCmdSetDescriptorBufferOffsetsEXT" ) );
vkCmdBindDescriptorBufferEmbeddedSamplersEXT =
PFN_vkCmdBindDescriptorBufferEmbeddedSamplersEXT( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorBufferEmbeddedSamplersEXT" ) );
vkGetBufferOpaqueCaptureDescriptorDataEXT =
PFN_vkGetBufferOpaqueCaptureDescriptorDataEXT( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureDescriptorDataEXT" ) );
vkGetImageOpaqueCaptureDescriptorDataEXT =
PFN_vkGetImageOpaqueCaptureDescriptorDataEXT( vkGetDeviceProcAddr( device, "vkGetImageOpaqueCaptureDescriptorDataEXT" ) );
vkGetImageViewOpaqueCaptureDescriptorDataEXT =
PFN_vkGetImageViewOpaqueCaptureDescriptorDataEXT( vkGetDeviceProcAddr( device, "vkGetImageViewOpaqueCaptureDescriptorDataEXT" ) );
vkGetSamplerOpaqueCaptureDescriptorDataEXT =
PFN_vkGetSamplerOpaqueCaptureDescriptorDataEXT( vkGetDeviceProcAddr( device, "vkGetSamplerOpaqueCaptureDescriptorDataEXT" ) );
vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT = PFN_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT(
vkGetDeviceProcAddr( device, "vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT" ) );
//=== VK_NV_fragment_shading_rate_enums ===
vkCmdSetFragmentShadingRateEnumNV = PFN_vkCmdSetFragmentShadingRateEnumNV( vkGetDeviceProcAddr( device, "vkCmdSetFragmentShadingRateEnumNV" ) );
//=== VK_EXT_mesh_shader ===
vkCmdDrawMeshTasksEXT = PFN_vkCmdDrawMeshTasksEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksEXT" ) );
vkCmdDrawMeshTasksIndirectEXT = PFN_vkCmdDrawMeshTasksIndirectEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectEXT" ) );
vkCmdDrawMeshTasksIndirectCountEXT = PFN_vkCmdDrawMeshTasksIndirectCountEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectCountEXT" ) );
//=== VK_KHR_copy_commands2 ===
vkCmdCopyBuffer2KHR = PFN_vkCmdCopyBuffer2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer2KHR" ) );
if ( !vkCmdCopyBuffer2 )
vkCmdCopyBuffer2 = vkCmdCopyBuffer2KHR;
vkCmdCopyImage2KHR = PFN_vkCmdCopyImage2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyImage2KHR" ) );
if ( !vkCmdCopyImage2 )
vkCmdCopyImage2 = vkCmdCopyImage2KHR;
vkCmdCopyBufferToImage2KHR = PFN_vkCmdCopyBufferToImage2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage2KHR" ) );
if ( !vkCmdCopyBufferToImage2 )
vkCmdCopyBufferToImage2 = vkCmdCopyBufferToImage2KHR;
vkCmdCopyImageToBuffer2KHR = PFN_vkCmdCopyImageToBuffer2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer2KHR" ) );
if ( !vkCmdCopyImageToBuffer2 )
vkCmdCopyImageToBuffer2 = vkCmdCopyImageToBuffer2KHR;
vkCmdBlitImage2KHR = PFN_vkCmdBlitImage2KHR( vkGetDeviceProcAddr( device, "vkCmdBlitImage2KHR" ) );
if ( !vkCmdBlitImage2 )
vkCmdBlitImage2 = vkCmdBlitImage2KHR;
vkCmdResolveImage2KHR = PFN_vkCmdResolveImage2KHR( vkGetDeviceProcAddr( device, "vkCmdResolveImage2KHR" ) );
if ( !vkCmdResolveImage2 )
vkCmdResolveImage2 = vkCmdResolveImage2KHR;
//=== VK_EXT_device_fault ===
vkGetDeviceFaultInfoEXT = PFN_vkGetDeviceFaultInfoEXT( vkGetDeviceProcAddr( device, "vkGetDeviceFaultInfoEXT" ) );
//=== VK_EXT_vertex_input_dynamic_state ===
vkCmdSetVertexInputEXT = PFN_vkCmdSetVertexInputEXT( vkGetDeviceProcAddr( device, "vkCmdSetVertexInputEXT" ) );
# if defined( VK_USE_PLATFORM_FUCHSIA )
//=== VK_FUCHSIA_external_memory ===
vkGetMemoryZirconHandleFUCHSIA = PFN_vkGetMemoryZirconHandleFUCHSIA( vkGetDeviceProcAddr( device, "vkGetMemoryZirconHandleFUCHSIA" ) );
vkGetMemoryZirconHandlePropertiesFUCHSIA =
PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA( vkGetDeviceProcAddr( device, "vkGetMemoryZirconHandlePropertiesFUCHSIA" ) );
# endif /*VK_USE_PLATFORM_FUCHSIA*/
# if defined( VK_USE_PLATFORM_FUCHSIA )
//=== VK_FUCHSIA_external_semaphore ===
vkImportSemaphoreZirconHandleFUCHSIA =
PFN_vkImportSemaphoreZirconHandleFUCHSIA( vkGetDeviceProcAddr( device, "vkImportSemaphoreZirconHandleFUCHSIA" ) );
vkGetSemaphoreZirconHandleFUCHSIA = PFN_vkGetSemaphoreZirconHandleFUCHSIA( vkGetDeviceProcAddr( device, "vkGetSemaphoreZirconHandleFUCHSIA" ) );
# endif /*VK_USE_PLATFORM_FUCHSIA*/
# if defined( VK_USE_PLATFORM_FUCHSIA )
//=== VK_FUCHSIA_buffer_collection ===
vkCreateBufferCollectionFUCHSIA = PFN_vkCreateBufferCollectionFUCHSIA( vkGetDeviceProcAddr( device, "vkCreateBufferCollectionFUCHSIA" ) );
vkSetBufferCollectionImageConstraintsFUCHSIA =
PFN_vkSetBufferCollectionImageConstraintsFUCHSIA( vkGetDeviceProcAddr( device, "vkSetBufferCollectionImageConstraintsFUCHSIA" ) );
vkSetBufferCollectionBufferConstraintsFUCHSIA =
PFN_vkSetBufferCollectionBufferConstraintsFUCHSIA( vkGetDeviceProcAddr( device, "vkSetBufferCollectionBufferConstraintsFUCHSIA" ) );
vkDestroyBufferCollectionFUCHSIA = PFN_vkDestroyBufferCollectionFUCHSIA( vkGetDeviceProcAddr( device, "vkDestroyBufferCollectionFUCHSIA" ) );
vkGetBufferCollectionPropertiesFUCHSIA =
PFN_vkGetBufferCollectionPropertiesFUCHSIA( vkGetDeviceProcAddr( device, "vkGetBufferCollectionPropertiesFUCHSIA" ) );
# endif /*VK_USE_PLATFORM_FUCHSIA*/
//=== VK_HUAWEI_subpass_shading ===
vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI =
PFN_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( vkGetDeviceProcAddr( device, "vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI" ) );
vkCmdSubpassShadingHUAWEI = PFN_vkCmdSubpassShadingHUAWEI( vkGetDeviceProcAddr( device, "vkCmdSubpassShadingHUAWEI" ) );
//=== VK_HUAWEI_invocation_mask ===
vkCmdBindInvocationMaskHUAWEI = PFN_vkCmdBindInvocationMaskHUAWEI( vkGetDeviceProcAddr( device, "vkCmdBindInvocationMaskHUAWEI" ) );
//=== VK_NV_external_memory_rdma ===
vkGetMemoryRemoteAddressNV = PFN_vkGetMemoryRemoteAddressNV( vkGetDeviceProcAddr( device, "vkGetMemoryRemoteAddressNV" ) );
//=== VK_EXT_pipeline_properties ===
vkGetPipelinePropertiesEXT = PFN_vkGetPipelinePropertiesEXT( vkGetDeviceProcAddr( device, "vkGetPipelinePropertiesEXT" ) );
//=== VK_EXT_extended_dynamic_state2 ===
vkCmdSetPatchControlPointsEXT = PFN_vkCmdSetPatchControlPointsEXT( vkGetDeviceProcAddr( device, "vkCmdSetPatchControlPointsEXT" ) );
vkCmdSetRasterizerDiscardEnableEXT = PFN_vkCmdSetRasterizerDiscardEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetRasterizerDiscardEnableEXT" ) );
if ( !vkCmdSetRasterizerDiscardEnable )
vkCmdSetRasterizerDiscardEnable = vkCmdSetRasterizerDiscardEnableEXT;
vkCmdSetDepthBiasEnableEXT = PFN_vkCmdSetDepthBiasEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthBiasEnableEXT" ) );
if ( !vkCmdSetDepthBiasEnable )
vkCmdSetDepthBiasEnable = vkCmdSetDepthBiasEnableEXT;
vkCmdSetLogicOpEXT = PFN_vkCmdSetLogicOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetLogicOpEXT" ) );
vkCmdSetPrimitiveRestartEnableEXT = PFN_vkCmdSetPrimitiveRestartEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveRestartEnableEXT" ) );
if ( !vkCmdSetPrimitiveRestartEnable )
vkCmdSetPrimitiveRestartEnable = vkCmdSetPrimitiveRestartEnableEXT;
//=== VK_EXT_color_write_enable ===
vkCmdSetColorWriteEnableEXT = PFN_vkCmdSetColorWriteEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorWriteEnableEXT" ) );
//=== VK_KHR_ray_tracing_maintenance1 ===
vkCmdTraceRaysIndirect2KHR = PFN_vkCmdTraceRaysIndirect2KHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysIndirect2KHR" ) );
//=== VK_EXT_multi_draw ===
vkCmdDrawMultiEXT = PFN_vkCmdDrawMultiEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMultiEXT" ) );
vkCmdDrawMultiIndexedEXT = PFN_vkCmdDrawMultiIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMultiIndexedEXT" ) );
//=== VK_EXT_opacity_micromap ===
vkCreateMicromapEXT = PFN_vkCreateMicromapEXT( vkGetDeviceProcAddr( device, "vkCreateMicromapEXT" ) );
vkDestroyMicromapEXT = PFN_vkDestroyMicromapEXT( vkGetDeviceProcAddr( device, "vkDestroyMicromapEXT" ) );
vkCmdBuildMicromapsEXT = PFN_vkCmdBuildMicromapsEXT( vkGetDeviceProcAddr( device, "vkCmdBuildMicromapsEXT" ) );
vkBuildMicromapsEXT = PFN_vkBuildMicromapsEXT( vkGetDeviceProcAddr( device, "vkBuildMicromapsEXT" ) );
vkCopyMicromapEXT = PFN_vkCopyMicromapEXT( vkGetDeviceProcAddr( device, "vkCopyMicromapEXT" ) );
vkCopyMicromapToMemoryEXT = PFN_vkCopyMicromapToMemoryEXT( vkGetDeviceProcAddr( device, "vkCopyMicromapToMemoryEXT" ) );
vkCopyMemoryToMicromapEXT = PFN_vkCopyMemoryToMicromapEXT( vkGetDeviceProcAddr( device, "vkCopyMemoryToMicromapEXT" ) );
vkWriteMicromapsPropertiesEXT = PFN_vkWriteMicromapsPropertiesEXT( vkGetDeviceProcAddr( device, "vkWriteMicromapsPropertiesEXT" ) );
vkCmdCopyMicromapEXT = PFN_vkCmdCopyMicromapEXT( vkGetDeviceProcAddr( device, "vkCmdCopyMicromapEXT" ) );
vkCmdCopyMicromapToMemoryEXT = PFN_vkCmdCopyMicromapToMemoryEXT( vkGetDeviceProcAddr( device, "vkCmdCopyMicromapToMemoryEXT" ) );
vkCmdCopyMemoryToMicromapEXT = PFN_vkCmdCopyMemoryToMicromapEXT( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryToMicromapEXT" ) );
vkCmdWriteMicromapsPropertiesEXT = PFN_vkCmdWriteMicromapsPropertiesEXT( vkGetDeviceProcAddr( device, "vkCmdWriteMicromapsPropertiesEXT" ) );
vkGetDeviceMicromapCompatibilityEXT = PFN_vkGetDeviceMicromapCompatibilityEXT( vkGetDeviceProcAddr( device, "vkGetDeviceMicromapCompatibilityEXT" ) );
vkGetMicromapBuildSizesEXT = PFN_vkGetMicromapBuildSizesEXT( vkGetDeviceProcAddr( device, "vkGetMicromapBuildSizesEXT" ) );
//=== VK_HUAWEI_cluster_culling_shader ===
vkCmdDrawClusterHUAWEI = PFN_vkCmdDrawClusterHUAWEI( vkGetDeviceProcAddr( device, "vkCmdDrawClusterHUAWEI" ) );
vkCmdDrawClusterIndirectHUAWEI = PFN_vkCmdDrawClusterIndirectHUAWEI( vkGetDeviceProcAddr( device, "vkCmdDrawClusterIndirectHUAWEI" ) );
//=== VK_EXT_pageable_device_local_memory ===
vkSetDeviceMemoryPriorityEXT = PFN_vkSetDeviceMemoryPriorityEXT( vkGetDeviceProcAddr( device, "vkSetDeviceMemoryPriorityEXT" ) );
//=== VK_KHR_maintenance4 ===
vkGetDeviceBufferMemoryRequirementsKHR =
PFN_vkGetDeviceBufferMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetDeviceBufferMemoryRequirementsKHR" ) );
if ( !vkGetDeviceBufferMemoryRequirements )
vkGetDeviceBufferMemoryRequirements = vkGetDeviceBufferMemoryRequirementsKHR;
vkGetDeviceImageMemoryRequirementsKHR =
PFN_vkGetDeviceImageMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetDeviceImageMemoryRequirementsKHR" ) );
if ( !vkGetDeviceImageMemoryRequirements )
vkGetDeviceImageMemoryRequirements = vkGetDeviceImageMemoryRequirementsKHR;
vkGetDeviceImageSparseMemoryRequirementsKHR =
PFN_vkGetDeviceImageSparseMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetDeviceImageSparseMemoryRequirementsKHR" ) );
if ( !vkGetDeviceImageSparseMemoryRequirements )
vkGetDeviceImageSparseMemoryRequirements = vkGetDeviceImageSparseMemoryRequirementsKHR;
//=== VK_VALVE_descriptor_set_host_mapping ===
vkGetDescriptorSetLayoutHostMappingInfoVALVE =
PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutHostMappingInfoVALVE" ) );
vkGetDescriptorSetHostMappingVALVE = PFN_vkGetDescriptorSetHostMappingVALVE( vkGetDeviceProcAddr( device, "vkGetDescriptorSetHostMappingVALVE" ) );
//=== VK_NV_copy_memory_indirect ===
vkCmdCopyMemoryIndirectNV = PFN_vkCmdCopyMemoryIndirectNV( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryIndirectNV" ) );
vkCmdCopyMemoryToImageIndirectNV = PFN_vkCmdCopyMemoryToImageIndirectNV( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryToImageIndirectNV" ) );
//=== VK_NV_memory_decompression ===
vkCmdDecompressMemoryNV = PFN_vkCmdDecompressMemoryNV( vkGetDeviceProcAddr( device, "vkCmdDecompressMemoryNV" ) );
vkCmdDecompressMemoryIndirectCountNV =
PFN_vkCmdDecompressMemoryIndirectCountNV( vkGetDeviceProcAddr( device, "vkCmdDecompressMemoryIndirectCountNV" ) );
//=== VK_NV_device_generated_commands_compute ===
vkGetPipelineIndirectMemoryRequirementsNV =
PFN_vkGetPipelineIndirectMemoryRequirementsNV( vkGetDeviceProcAddr( device, "vkGetPipelineIndirectMemoryRequirementsNV" ) );
vkCmdUpdatePipelineIndirectBufferNV = PFN_vkCmdUpdatePipelineIndirectBufferNV( vkGetDeviceProcAddr( device, "vkCmdUpdatePipelineIndirectBufferNV" ) );
vkGetPipelineIndirectDeviceAddressNV =
PFN_vkGetPipelineIndirectDeviceAddressNV( vkGetDeviceProcAddr( device, "vkGetPipelineIndirectDeviceAddressNV" ) );
//=== VK_EXT_extended_dynamic_state3 ===
vkCmdSetDepthClampEnableEXT = PFN_vkCmdSetDepthClampEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthClampEnableEXT" ) );
vkCmdSetPolygonModeEXT = PFN_vkCmdSetPolygonModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetPolygonModeEXT" ) );
vkCmdSetRasterizationSamplesEXT = PFN_vkCmdSetRasterizationSamplesEXT( vkGetDeviceProcAddr( device, "vkCmdSetRasterizationSamplesEXT" ) );
vkCmdSetSampleMaskEXT = PFN_vkCmdSetSampleMaskEXT( vkGetDeviceProcAddr( device, "vkCmdSetSampleMaskEXT" ) );
vkCmdSetAlphaToCoverageEnableEXT = PFN_vkCmdSetAlphaToCoverageEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetAlphaToCoverageEnableEXT" ) );
vkCmdSetAlphaToOneEnableEXT = PFN_vkCmdSetAlphaToOneEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetAlphaToOneEnableEXT" ) );
vkCmdSetLogicOpEnableEXT = PFN_vkCmdSetLogicOpEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetLogicOpEnableEXT" ) );
vkCmdSetColorBlendEnableEXT = PFN_vkCmdSetColorBlendEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorBlendEnableEXT" ) );
vkCmdSetColorBlendEquationEXT = PFN_vkCmdSetColorBlendEquationEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorBlendEquationEXT" ) );
vkCmdSetColorWriteMaskEXT = PFN_vkCmdSetColorWriteMaskEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorWriteMaskEXT" ) );
vkCmdSetTessellationDomainOriginEXT = PFN_vkCmdSetTessellationDomainOriginEXT( vkGetDeviceProcAddr( device, "vkCmdSetTessellationDomainOriginEXT" ) );
vkCmdSetRasterizationStreamEXT = PFN_vkCmdSetRasterizationStreamEXT( vkGetDeviceProcAddr( device, "vkCmdSetRasterizationStreamEXT" ) );
vkCmdSetConservativeRasterizationModeEXT =
PFN_vkCmdSetConservativeRasterizationModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetConservativeRasterizationModeEXT" ) );
vkCmdSetExtraPrimitiveOverestimationSizeEXT =
PFN_vkCmdSetExtraPrimitiveOverestimationSizeEXT( vkGetDeviceProcAddr( device, "vkCmdSetExtraPrimitiveOverestimationSizeEXT" ) );
vkCmdSetDepthClipEnableEXT = PFN_vkCmdSetDepthClipEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthClipEnableEXT" ) );
vkCmdSetSampleLocationsEnableEXT = PFN_vkCmdSetSampleLocationsEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetSampleLocationsEnableEXT" ) );
vkCmdSetColorBlendAdvancedEXT = PFN_vkCmdSetColorBlendAdvancedEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorBlendAdvancedEXT" ) );
vkCmdSetProvokingVertexModeEXT = PFN_vkCmdSetProvokingVertexModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetProvokingVertexModeEXT" ) );
vkCmdSetLineRasterizationModeEXT = PFN_vkCmdSetLineRasterizationModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetLineRasterizationModeEXT" ) );
vkCmdSetLineStippleEnableEXT = PFN_vkCmdSetLineStippleEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetLineStippleEnableEXT" ) );
vkCmdSetDepthClipNegativeOneToOneEXT =
PFN_vkCmdSetDepthClipNegativeOneToOneEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthClipNegativeOneToOneEXT" ) );
vkCmdSetViewportWScalingEnableNV = PFN_vkCmdSetViewportWScalingEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportWScalingEnableNV" ) );
vkCmdSetViewportSwizzleNV = PFN_vkCmdSetViewportSwizzleNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportSwizzleNV" ) );
vkCmdSetCoverageToColorEnableNV = PFN_vkCmdSetCoverageToColorEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageToColorEnableNV" ) );
vkCmdSetCoverageToColorLocationNV = PFN_vkCmdSetCoverageToColorLocationNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageToColorLocationNV" ) );
vkCmdSetCoverageModulationModeNV = PFN_vkCmdSetCoverageModulationModeNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageModulationModeNV" ) );
vkCmdSetCoverageModulationTableEnableNV =
PFN_vkCmdSetCoverageModulationTableEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageModulationTableEnableNV" ) );
vkCmdSetCoverageModulationTableNV = PFN_vkCmdSetCoverageModulationTableNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageModulationTableNV" ) );
vkCmdSetShadingRateImageEnableNV = PFN_vkCmdSetShadingRateImageEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetShadingRateImageEnableNV" ) );
vkCmdSetRepresentativeFragmentTestEnableNV =
PFN_vkCmdSetRepresentativeFragmentTestEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetRepresentativeFragmentTestEnableNV" ) );
vkCmdSetCoverageReductionModeNV = PFN_vkCmdSetCoverageReductionModeNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageReductionModeNV" ) );
//=== VK_EXT_shader_module_identifier ===
vkGetShaderModuleIdentifierEXT = PFN_vkGetShaderModuleIdentifierEXT( vkGetDeviceProcAddr( device, "vkGetShaderModuleIdentifierEXT" ) );
vkGetShaderModuleCreateInfoIdentifierEXT =
PFN_vkGetShaderModuleCreateInfoIdentifierEXT( vkGetDeviceProcAddr( device, "vkGetShaderModuleCreateInfoIdentifierEXT" ) );
//=== VK_NV_optical_flow ===
vkCreateOpticalFlowSessionNV = PFN_vkCreateOpticalFlowSessionNV( vkGetDeviceProcAddr( device, "vkCreateOpticalFlowSessionNV" ) );
vkDestroyOpticalFlowSessionNV = PFN_vkDestroyOpticalFlowSessionNV( vkGetDeviceProcAddr( device, "vkDestroyOpticalFlowSessionNV" ) );
vkBindOpticalFlowSessionImageNV = PFN_vkBindOpticalFlowSessionImageNV( vkGetDeviceProcAddr( device, "vkBindOpticalFlowSessionImageNV" ) );
vkCmdOpticalFlowExecuteNV = PFN_vkCmdOpticalFlowExecuteNV( vkGetDeviceProcAddr( device, "vkCmdOpticalFlowExecuteNV" ) );
//=== VK_KHR_maintenance5 ===
vkCmdBindIndexBuffer2KHR = PFN_vkCmdBindIndexBuffer2KHR( vkGetDeviceProcAddr( device, "vkCmdBindIndexBuffer2KHR" ) );
vkGetRenderingAreaGranularityKHR = PFN_vkGetRenderingAreaGranularityKHR( vkGetDeviceProcAddr( device, "vkGetRenderingAreaGranularityKHR" ) );
vkGetDeviceImageSubresourceLayoutKHR =
PFN_vkGetDeviceImageSubresourceLayoutKHR( vkGetDeviceProcAddr( device, "vkGetDeviceImageSubresourceLayoutKHR" ) );
vkGetImageSubresourceLayout2KHR = PFN_vkGetImageSubresourceLayout2KHR( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout2KHR" ) );
//=== VK_AMD_anti_lag ===
vkAntiLagUpdateAMD = PFN_vkAntiLagUpdateAMD( vkGetDeviceProcAddr( device, "vkAntiLagUpdateAMD" ) );
//=== VK_EXT_shader_object ===
vkCreateShadersEXT = PFN_vkCreateShadersEXT( vkGetDeviceProcAddr( device, "vkCreateShadersEXT" ) );
vkDestroyShaderEXT = PFN_vkDestroyShaderEXT( vkGetDeviceProcAddr( device, "vkDestroyShaderEXT" ) );
vkGetShaderBinaryDataEXT = PFN_vkGetShaderBinaryDataEXT( vkGetDeviceProcAddr( device, "vkGetShaderBinaryDataEXT" ) );
vkCmdBindShadersEXT = PFN_vkCmdBindShadersEXT( vkGetDeviceProcAddr( device, "vkCmdBindShadersEXT" ) );
vkCmdSetDepthClampRangeEXT = PFN_vkCmdSetDepthClampRangeEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthClampRangeEXT" ) );
//=== VK_KHR_pipeline_binary ===
vkCreatePipelineBinariesKHR = PFN_vkCreatePipelineBinariesKHR( vkGetDeviceProcAddr( device, "vkCreatePipelineBinariesKHR" ) );
vkDestroyPipelineBinaryKHR = PFN_vkDestroyPipelineBinaryKHR( vkGetDeviceProcAddr( device, "vkDestroyPipelineBinaryKHR" ) );
vkGetPipelineKeyKHR = PFN_vkGetPipelineKeyKHR( vkGetDeviceProcAddr( device, "vkGetPipelineKeyKHR" ) );
vkGetPipelineBinaryDataKHR = PFN_vkGetPipelineBinaryDataKHR( vkGetDeviceProcAddr( device, "vkGetPipelineBinaryDataKHR" ) );
vkReleaseCapturedPipelineDataKHR = PFN_vkReleaseCapturedPipelineDataKHR( vkGetDeviceProcAddr( device, "vkReleaseCapturedPipelineDataKHR" ) );
//=== VK_QCOM_tile_properties ===
vkGetFramebufferTilePropertiesQCOM = PFN_vkGetFramebufferTilePropertiesQCOM( vkGetDeviceProcAddr( device, "vkGetFramebufferTilePropertiesQCOM" ) );
vkGetDynamicRenderingTilePropertiesQCOM =
PFN_vkGetDynamicRenderingTilePropertiesQCOM( vkGetDeviceProcAddr( device, "vkGetDynamicRenderingTilePropertiesQCOM" ) );
//=== VK_NV_low_latency2 ===
vkSetLatencySleepModeNV = PFN_vkSetLatencySleepModeNV( vkGetDeviceProcAddr( device, "vkSetLatencySleepModeNV" ) );
vkLatencySleepNV = PFN_vkLatencySleepNV( vkGetDeviceProcAddr( device, "vkLatencySleepNV" ) );
vkSetLatencyMarkerNV = PFN_vkSetLatencyMarkerNV( vkGetDeviceProcAddr( device, "vkSetLatencyMarkerNV" ) );
vkGetLatencyTimingsNV = PFN_vkGetLatencyTimingsNV( vkGetDeviceProcAddr( device, "vkGetLatencyTimingsNV" ) );
vkQueueNotifyOutOfBandNV = PFN_vkQueueNotifyOutOfBandNV( vkGetDeviceProcAddr( device, "vkQueueNotifyOutOfBandNV" ) );
//=== VK_EXT_attachment_feedback_loop_dynamic_state ===
vkCmdSetAttachmentFeedbackLoopEnableEXT =
PFN_vkCmdSetAttachmentFeedbackLoopEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetAttachmentFeedbackLoopEnableEXT" ) );
# if defined( VK_USE_PLATFORM_SCREEN_QNX )
//=== VK_QNX_external_memory_screen_buffer ===
vkGetScreenBufferPropertiesQNX = PFN_vkGetScreenBufferPropertiesQNX( vkGetDeviceProcAddr( device, "vkGetScreenBufferPropertiesQNX" ) );
# endif /*VK_USE_PLATFORM_SCREEN_QNX*/
//=== VK_KHR_line_rasterization ===
vkCmdSetLineStippleKHR = PFN_vkCmdSetLineStippleKHR( vkGetDeviceProcAddr( device, "vkCmdSetLineStippleKHR" ) );
//=== VK_KHR_calibrated_timestamps ===
vkGetCalibratedTimestampsKHR = PFN_vkGetCalibratedTimestampsKHR( vkGetDeviceProcAddr( device, "vkGetCalibratedTimestampsKHR" ) );
//=== VK_KHR_maintenance6 ===
vkCmdBindDescriptorSets2KHR = PFN_vkCmdBindDescriptorSets2KHR( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorSets2KHR" ) );
vkCmdPushConstants2KHR = PFN_vkCmdPushConstants2KHR( vkGetDeviceProcAddr( device, "vkCmdPushConstants2KHR" ) );
vkCmdPushDescriptorSet2KHR = PFN_vkCmdPushDescriptorSet2KHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSet2KHR" ) );
vkCmdPushDescriptorSetWithTemplate2KHR =
PFN_vkCmdPushDescriptorSetWithTemplate2KHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetWithTemplate2KHR" ) );
vkCmdSetDescriptorBufferOffsets2EXT = PFN_vkCmdSetDescriptorBufferOffsets2EXT( vkGetDeviceProcAddr( device, "vkCmdSetDescriptorBufferOffsets2EXT" ) );
vkCmdBindDescriptorBufferEmbeddedSamplers2EXT =
PFN_vkCmdBindDescriptorBufferEmbeddedSamplers2EXT( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorBufferEmbeddedSamplers2EXT" ) );
//=== VK_EXT_device_generated_commands ===
vkGetGeneratedCommandsMemoryRequirementsEXT =
PFN_vkGetGeneratedCommandsMemoryRequirementsEXT( vkGetDeviceProcAddr( device, "vkGetGeneratedCommandsMemoryRequirementsEXT" ) );
vkCmdPreprocessGeneratedCommandsEXT = PFN_vkCmdPreprocessGeneratedCommandsEXT( vkGetDeviceProcAddr( device, "vkCmdPreprocessGeneratedCommandsEXT" ) );
vkCmdExecuteGeneratedCommandsEXT = PFN_vkCmdExecuteGeneratedCommandsEXT( vkGetDeviceProcAddr( device, "vkCmdExecuteGeneratedCommandsEXT" ) );
vkCreateIndirectCommandsLayoutEXT = PFN_vkCreateIndirectCommandsLayoutEXT( vkGetDeviceProcAddr( device, "vkCreateIndirectCommandsLayoutEXT" ) );
vkDestroyIndirectCommandsLayoutEXT = PFN_vkDestroyIndirectCommandsLayoutEXT( vkGetDeviceProcAddr( device, "vkDestroyIndirectCommandsLayoutEXT" ) );
vkCreateIndirectExecutionSetEXT = PFN_vkCreateIndirectExecutionSetEXT( vkGetDeviceProcAddr( device, "vkCreateIndirectExecutionSetEXT" ) );
vkDestroyIndirectExecutionSetEXT = PFN_vkDestroyIndirectExecutionSetEXT( vkGetDeviceProcAddr( device, "vkDestroyIndirectExecutionSetEXT" ) );
vkUpdateIndirectExecutionSetPipelineEXT =
PFN_vkUpdateIndirectExecutionSetPipelineEXT( vkGetDeviceProcAddr( device, "vkUpdateIndirectExecutionSetPipelineEXT" ) );
vkUpdateIndirectExecutionSetShaderEXT =
PFN_vkUpdateIndirectExecutionSetShaderEXT( vkGetDeviceProcAddr( device, "vkUpdateIndirectExecutionSetShaderEXT" ) );
}
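// Editorial note: the constructor above resolves every device-level command exactly once through
// vkGetDeviceProcAddr; pointers for commands that the device does not expose (core version too low, or
// extension not enabled) are typically left null. Applications normally do not build this dispatcher
// themselves - it is set up when a RAII device is created and is reachable through getDispatcher().
// A minimal usage sketch (object names are illustrative, not part of this header):
//
//   vk::raii::Device device( physicalDevice, deviceCreateInfo );
//   device.getDispatcher()->vkDeviceWaitIdle( static_cast<VkDevice>( *device ) );  // raw call through the table
//   device.waitIdle();                                                             // equivalent RAII member function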
public:
//=== VK_VERSION_1_0 ===
PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr = 0;
PFN_vkDestroyDevice vkDestroyDevice = 0;
PFN_vkGetDeviceQueue vkGetDeviceQueue = 0;
PFN_vkQueueSubmit vkQueueSubmit = 0;
PFN_vkQueueWaitIdle vkQueueWaitIdle = 0;
PFN_vkDeviceWaitIdle vkDeviceWaitIdle = 0;
PFN_vkAllocateMemory vkAllocateMemory = 0;
PFN_vkFreeMemory vkFreeMemory = 0;
PFN_vkMapMemory vkMapMemory = 0;
PFN_vkUnmapMemory vkUnmapMemory = 0;
PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges = 0;
PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges = 0;
PFN_vkGetDeviceMemoryCommitment vkGetDeviceMemoryCommitment = 0;
PFN_vkBindBufferMemory vkBindBufferMemory = 0;
PFN_vkBindImageMemory vkBindImageMemory = 0;
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements = 0;
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements = 0;
PFN_vkGetImageSparseMemoryRequirements vkGetImageSparseMemoryRequirements = 0;
PFN_vkQueueBindSparse vkQueueBindSparse = 0;
PFN_vkCreateFence vkCreateFence = 0;
PFN_vkDestroyFence vkDestroyFence = 0;
PFN_vkResetFences vkResetFences = 0;
PFN_vkGetFenceStatus vkGetFenceStatus = 0;
PFN_vkWaitForFences vkWaitForFences = 0;
PFN_vkCreateSemaphore vkCreateSemaphore = 0;
PFN_vkDestroySemaphore vkDestroySemaphore = 0;
PFN_vkCreateEvent vkCreateEvent = 0;
PFN_vkDestroyEvent vkDestroyEvent = 0;
PFN_vkGetEventStatus vkGetEventStatus = 0;
PFN_vkSetEvent vkSetEvent = 0;
PFN_vkResetEvent vkResetEvent = 0;
PFN_vkCreateQueryPool vkCreateQueryPool = 0;
PFN_vkDestroyQueryPool vkDestroyQueryPool = 0;
PFN_vkGetQueryPoolResults vkGetQueryPoolResults = 0;
PFN_vkCreateBuffer vkCreateBuffer = 0;
PFN_vkDestroyBuffer vkDestroyBuffer = 0;
PFN_vkCreateBufferView vkCreateBufferView = 0;
PFN_vkDestroyBufferView vkDestroyBufferView = 0;
PFN_vkCreateImage vkCreateImage = 0;
PFN_vkDestroyImage vkDestroyImage = 0;
PFN_vkGetImageSubresourceLayout vkGetImageSubresourceLayout = 0;
PFN_vkCreateImageView vkCreateImageView = 0;
PFN_vkDestroyImageView vkDestroyImageView = 0;
PFN_vkCreateShaderModule vkCreateShaderModule = 0;
PFN_vkDestroyShaderModule vkDestroyShaderModule = 0;
PFN_vkCreatePipelineCache vkCreatePipelineCache = 0;
PFN_vkDestroyPipelineCache vkDestroyPipelineCache = 0;
PFN_vkGetPipelineCacheData vkGetPipelineCacheData = 0;
PFN_vkMergePipelineCaches vkMergePipelineCaches = 0;
PFN_vkCreateGraphicsPipelines vkCreateGraphicsPipelines = 0;
PFN_vkCreateComputePipelines vkCreateComputePipelines = 0;
PFN_vkDestroyPipeline vkDestroyPipeline = 0;
PFN_vkCreatePipelineLayout vkCreatePipelineLayout = 0;
PFN_vkDestroyPipelineLayout vkDestroyPipelineLayout = 0;
PFN_vkCreateSampler vkCreateSampler = 0;
PFN_vkDestroySampler vkDestroySampler = 0;
PFN_vkCreateDescriptorSetLayout vkCreateDescriptorSetLayout = 0;
PFN_vkDestroyDescriptorSetLayout vkDestroyDescriptorSetLayout = 0;
PFN_vkCreateDescriptorPool vkCreateDescriptorPool = 0;
PFN_vkDestroyDescriptorPool vkDestroyDescriptorPool = 0;
PFN_vkResetDescriptorPool vkResetDescriptorPool = 0;
PFN_vkAllocateDescriptorSets vkAllocateDescriptorSets = 0;
PFN_vkFreeDescriptorSets vkFreeDescriptorSets = 0;
PFN_vkUpdateDescriptorSets vkUpdateDescriptorSets = 0;
PFN_vkCreateFramebuffer vkCreateFramebuffer = 0;
PFN_vkDestroyFramebuffer vkDestroyFramebuffer = 0;
PFN_vkCreateRenderPass vkCreateRenderPass = 0;
PFN_vkDestroyRenderPass vkDestroyRenderPass = 0;
PFN_vkGetRenderAreaGranularity vkGetRenderAreaGranularity = 0;
PFN_vkCreateCommandPool vkCreateCommandPool = 0;
PFN_vkDestroyCommandPool vkDestroyCommandPool = 0;
PFN_vkResetCommandPool vkResetCommandPool = 0;
PFN_vkAllocateCommandBuffers vkAllocateCommandBuffers = 0;
PFN_vkFreeCommandBuffers vkFreeCommandBuffers = 0;
PFN_vkBeginCommandBuffer vkBeginCommandBuffer = 0;
PFN_vkEndCommandBuffer vkEndCommandBuffer = 0;
PFN_vkResetCommandBuffer vkResetCommandBuffer = 0;
PFN_vkCmdBindPipeline vkCmdBindPipeline = 0;
PFN_vkCmdSetViewport vkCmdSetViewport = 0;
PFN_vkCmdSetScissor vkCmdSetScissor = 0;
PFN_vkCmdSetLineWidth vkCmdSetLineWidth = 0;
PFN_vkCmdSetDepthBias vkCmdSetDepthBias = 0;
PFN_vkCmdSetBlendConstants vkCmdSetBlendConstants = 0;
PFN_vkCmdSetDepthBounds vkCmdSetDepthBounds = 0;
PFN_vkCmdSetStencilCompareMask vkCmdSetStencilCompareMask = 0;
PFN_vkCmdSetStencilWriteMask vkCmdSetStencilWriteMask = 0;
PFN_vkCmdSetStencilReference vkCmdSetStencilReference = 0;
PFN_vkCmdBindDescriptorSets vkCmdBindDescriptorSets = 0;
PFN_vkCmdBindIndexBuffer vkCmdBindIndexBuffer = 0;
PFN_vkCmdBindVertexBuffers vkCmdBindVertexBuffers = 0;
PFN_vkCmdDraw vkCmdDraw = 0;
PFN_vkCmdDrawIndexed vkCmdDrawIndexed = 0;
PFN_vkCmdDrawIndirect vkCmdDrawIndirect = 0;
PFN_vkCmdDrawIndexedIndirect vkCmdDrawIndexedIndirect = 0;
PFN_vkCmdDispatch vkCmdDispatch = 0;
PFN_vkCmdDispatchIndirect vkCmdDispatchIndirect = 0;
PFN_vkCmdCopyBuffer vkCmdCopyBuffer = 0;
PFN_vkCmdCopyImage vkCmdCopyImage = 0;
PFN_vkCmdBlitImage vkCmdBlitImage = 0;
PFN_vkCmdCopyBufferToImage vkCmdCopyBufferToImage = 0;
PFN_vkCmdCopyImageToBuffer vkCmdCopyImageToBuffer = 0;
PFN_vkCmdUpdateBuffer vkCmdUpdateBuffer = 0;
PFN_vkCmdFillBuffer vkCmdFillBuffer = 0;
PFN_vkCmdClearColorImage vkCmdClearColorImage = 0;
PFN_vkCmdClearDepthStencilImage vkCmdClearDepthStencilImage = 0;
PFN_vkCmdClearAttachments vkCmdClearAttachments = 0;
PFN_vkCmdResolveImage vkCmdResolveImage = 0;
PFN_vkCmdSetEvent vkCmdSetEvent = 0;
PFN_vkCmdResetEvent vkCmdResetEvent = 0;
PFN_vkCmdWaitEvents vkCmdWaitEvents = 0;
PFN_vkCmdPipelineBarrier vkCmdPipelineBarrier = 0;
PFN_vkCmdBeginQuery vkCmdBeginQuery = 0;
PFN_vkCmdEndQuery vkCmdEndQuery = 0;
PFN_vkCmdResetQueryPool vkCmdResetQueryPool = 0;
PFN_vkCmdWriteTimestamp vkCmdWriteTimestamp = 0;
PFN_vkCmdCopyQueryPoolResults vkCmdCopyQueryPoolResults = 0;
PFN_vkCmdPushConstants vkCmdPushConstants = 0;
PFN_vkCmdBeginRenderPass vkCmdBeginRenderPass = 0;
PFN_vkCmdNextSubpass vkCmdNextSubpass = 0;
PFN_vkCmdEndRenderPass vkCmdEndRenderPass = 0;
PFN_vkCmdExecuteCommands vkCmdExecuteCommands = 0;
//=== VK_VERSION_1_1 ===
PFN_vkBindBufferMemory2 vkBindBufferMemory2 = 0;
PFN_vkBindImageMemory2 vkBindImageMemory2 = 0;
PFN_vkGetDeviceGroupPeerMemoryFeatures vkGetDeviceGroupPeerMemoryFeatures = 0;
PFN_vkCmdSetDeviceMask vkCmdSetDeviceMask = 0;
PFN_vkCmdDispatchBase vkCmdDispatchBase = 0;
PFN_vkGetImageMemoryRequirements2 vkGetImageMemoryRequirements2 = 0;
PFN_vkGetBufferMemoryRequirements2 vkGetBufferMemoryRequirements2 = 0;
PFN_vkGetImageSparseMemoryRequirements2 vkGetImageSparseMemoryRequirements2 = 0;
PFN_vkTrimCommandPool vkTrimCommandPool = 0;
PFN_vkGetDeviceQueue2 vkGetDeviceQueue2 = 0;
PFN_vkCreateSamplerYcbcrConversion vkCreateSamplerYcbcrConversion = 0;
PFN_vkDestroySamplerYcbcrConversion vkDestroySamplerYcbcrConversion = 0;
PFN_vkCreateDescriptorUpdateTemplate vkCreateDescriptorUpdateTemplate = 0;
PFN_vkDestroyDescriptorUpdateTemplate vkDestroyDescriptorUpdateTemplate = 0;
PFN_vkUpdateDescriptorSetWithTemplate vkUpdateDescriptorSetWithTemplate = 0;
PFN_vkGetDescriptorSetLayoutSupport vkGetDescriptorSetLayoutSupport = 0;
//=== VK_VERSION_1_2 ===
PFN_vkCmdDrawIndirectCount vkCmdDrawIndirectCount = 0;
PFN_vkCmdDrawIndexedIndirectCount vkCmdDrawIndexedIndirectCount = 0;
PFN_vkCreateRenderPass2 vkCreateRenderPass2 = 0;
PFN_vkCmdBeginRenderPass2 vkCmdBeginRenderPass2 = 0;
PFN_vkCmdNextSubpass2 vkCmdNextSubpass2 = 0;
PFN_vkCmdEndRenderPass2 vkCmdEndRenderPass2 = 0;
PFN_vkResetQueryPool vkResetQueryPool = 0;
PFN_vkGetSemaphoreCounterValue vkGetSemaphoreCounterValue = 0;
PFN_vkWaitSemaphores vkWaitSemaphores = 0;
PFN_vkSignalSemaphore vkSignalSemaphore = 0;
PFN_vkGetBufferDeviceAddress vkGetBufferDeviceAddress = 0;
PFN_vkGetBufferOpaqueCaptureAddress vkGetBufferOpaqueCaptureAddress = 0;
PFN_vkGetDeviceMemoryOpaqueCaptureAddress vkGetDeviceMemoryOpaqueCaptureAddress = 0;
//=== VK_VERSION_1_3 ===
PFN_vkCreatePrivateDataSlot vkCreatePrivateDataSlot = 0;
PFN_vkDestroyPrivateDataSlot vkDestroyPrivateDataSlot = 0;
PFN_vkSetPrivateData vkSetPrivateData = 0;
PFN_vkGetPrivateData vkGetPrivateData = 0;
PFN_vkCmdSetEvent2 vkCmdSetEvent2 = 0;
PFN_vkCmdResetEvent2 vkCmdResetEvent2 = 0;
PFN_vkCmdWaitEvents2 vkCmdWaitEvents2 = 0;
PFN_vkCmdPipelineBarrier2 vkCmdPipelineBarrier2 = 0;
PFN_vkCmdWriteTimestamp2 vkCmdWriteTimestamp2 = 0;
PFN_vkQueueSubmit2 vkQueueSubmit2 = 0;
PFN_vkCmdCopyBuffer2 vkCmdCopyBuffer2 = 0;
PFN_vkCmdCopyImage2 vkCmdCopyImage2 = 0;
PFN_vkCmdCopyBufferToImage2 vkCmdCopyBufferToImage2 = 0;
PFN_vkCmdCopyImageToBuffer2 vkCmdCopyImageToBuffer2 = 0;
PFN_vkCmdBlitImage2 vkCmdBlitImage2 = 0;
PFN_vkCmdResolveImage2 vkCmdResolveImage2 = 0;
PFN_vkCmdBeginRendering vkCmdBeginRendering = 0;
PFN_vkCmdEndRendering vkCmdEndRendering = 0;
PFN_vkCmdSetCullMode vkCmdSetCullMode = 0;
PFN_vkCmdSetFrontFace vkCmdSetFrontFace = 0;
PFN_vkCmdSetPrimitiveTopology vkCmdSetPrimitiveTopology = 0;
PFN_vkCmdSetViewportWithCount vkCmdSetViewportWithCount = 0;
PFN_vkCmdSetScissorWithCount vkCmdSetScissorWithCount = 0;
PFN_vkCmdBindVertexBuffers2 vkCmdBindVertexBuffers2 = 0;
PFN_vkCmdSetDepthTestEnable vkCmdSetDepthTestEnable = 0;
PFN_vkCmdSetDepthWriteEnable vkCmdSetDepthWriteEnable = 0;
PFN_vkCmdSetDepthCompareOp vkCmdSetDepthCompareOp = 0;
PFN_vkCmdSetDepthBoundsTestEnable vkCmdSetDepthBoundsTestEnable = 0;
PFN_vkCmdSetStencilTestEnable vkCmdSetStencilTestEnable = 0;
PFN_vkCmdSetStencilOp vkCmdSetStencilOp = 0;
PFN_vkCmdSetRasterizerDiscardEnable vkCmdSetRasterizerDiscardEnable = 0;
PFN_vkCmdSetDepthBiasEnable vkCmdSetDepthBiasEnable = 0;
PFN_vkCmdSetPrimitiveRestartEnable vkCmdSetPrimitiveRestartEnable = 0;
PFN_vkGetDeviceBufferMemoryRequirements vkGetDeviceBufferMemoryRequirements = 0;
PFN_vkGetDeviceImageMemoryRequirements vkGetDeviceImageMemoryRequirements = 0;
PFN_vkGetDeviceImageSparseMemoryRequirements vkGetDeviceImageSparseMemoryRequirements = 0;
//=== VK_KHR_swapchain ===
PFN_vkCreateSwapchainKHR vkCreateSwapchainKHR = 0;
PFN_vkDestroySwapchainKHR vkDestroySwapchainKHR = 0;
PFN_vkGetSwapchainImagesKHR vkGetSwapchainImagesKHR = 0;
PFN_vkAcquireNextImageKHR vkAcquireNextImageKHR = 0;
PFN_vkQueuePresentKHR vkQueuePresentKHR = 0;
PFN_vkGetDeviceGroupPresentCapabilitiesKHR vkGetDeviceGroupPresentCapabilitiesKHR = 0;
PFN_vkGetDeviceGroupSurfacePresentModesKHR vkGetDeviceGroupSurfacePresentModesKHR = 0;
PFN_vkAcquireNextImage2KHR vkAcquireNextImage2KHR = 0;
//=== VK_KHR_display_swapchain ===
PFN_vkCreateSharedSwapchainsKHR vkCreateSharedSwapchainsKHR = 0;
//=== VK_EXT_debug_marker ===
PFN_vkDebugMarkerSetObjectTagEXT vkDebugMarkerSetObjectTagEXT = 0;
PFN_vkDebugMarkerSetObjectNameEXT vkDebugMarkerSetObjectNameEXT = 0;
PFN_vkCmdDebugMarkerBeginEXT vkCmdDebugMarkerBeginEXT = 0;
PFN_vkCmdDebugMarkerEndEXT vkCmdDebugMarkerEndEXT = 0;
PFN_vkCmdDebugMarkerInsertEXT vkCmdDebugMarkerInsertEXT = 0;
//=== VK_KHR_video_queue ===
PFN_vkCreateVideoSessionKHR vkCreateVideoSessionKHR = 0;
PFN_vkDestroyVideoSessionKHR vkDestroyVideoSessionKHR = 0;
PFN_vkGetVideoSessionMemoryRequirementsKHR vkGetVideoSessionMemoryRequirementsKHR = 0;
PFN_vkBindVideoSessionMemoryKHR vkBindVideoSessionMemoryKHR = 0;
PFN_vkCreateVideoSessionParametersKHR vkCreateVideoSessionParametersKHR = 0;
PFN_vkUpdateVideoSessionParametersKHR vkUpdateVideoSessionParametersKHR = 0;
PFN_vkDestroyVideoSessionParametersKHR vkDestroyVideoSessionParametersKHR = 0;
PFN_vkCmdBeginVideoCodingKHR vkCmdBeginVideoCodingKHR = 0;
PFN_vkCmdEndVideoCodingKHR vkCmdEndVideoCodingKHR = 0;
PFN_vkCmdControlVideoCodingKHR vkCmdControlVideoCodingKHR = 0;
//=== VK_KHR_video_decode_queue ===
PFN_vkCmdDecodeVideoKHR vkCmdDecodeVideoKHR = 0;
//=== VK_EXT_transform_feedback ===
PFN_vkCmdBindTransformFeedbackBuffersEXT vkCmdBindTransformFeedbackBuffersEXT = 0;
PFN_vkCmdBeginTransformFeedbackEXT vkCmdBeginTransformFeedbackEXT = 0;
PFN_vkCmdEndTransformFeedbackEXT vkCmdEndTransformFeedbackEXT = 0;
PFN_vkCmdBeginQueryIndexedEXT vkCmdBeginQueryIndexedEXT = 0;
PFN_vkCmdEndQueryIndexedEXT vkCmdEndQueryIndexedEXT = 0;
PFN_vkCmdDrawIndirectByteCountEXT vkCmdDrawIndirectByteCountEXT = 0;
//=== VK_NVX_binary_import ===
PFN_vkCreateCuModuleNVX vkCreateCuModuleNVX = 0;
PFN_vkCreateCuFunctionNVX vkCreateCuFunctionNVX = 0;
PFN_vkDestroyCuModuleNVX vkDestroyCuModuleNVX = 0;
PFN_vkDestroyCuFunctionNVX vkDestroyCuFunctionNVX = 0;
PFN_vkCmdCuLaunchKernelNVX vkCmdCuLaunchKernelNVX = 0;
//=== VK_NVX_image_view_handle ===
PFN_vkGetImageViewHandleNVX vkGetImageViewHandleNVX = 0;
PFN_vkGetImageViewAddressNVX vkGetImageViewAddressNVX = 0;
//=== VK_AMD_draw_indirect_count ===
PFN_vkCmdDrawIndirectCountAMD vkCmdDrawIndirectCountAMD = 0;
PFN_vkCmdDrawIndexedIndirectCountAMD vkCmdDrawIndexedIndirectCountAMD = 0;
//=== VK_AMD_shader_info ===
PFN_vkGetShaderInfoAMD vkGetShaderInfoAMD = 0;
//=== VK_KHR_dynamic_rendering ===
PFN_vkCmdBeginRenderingKHR vkCmdBeginRenderingKHR = 0;
PFN_vkCmdEndRenderingKHR vkCmdEndRenderingKHR = 0;
# if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_NV_external_memory_win32 ===
PFN_vkGetMemoryWin32HandleNV vkGetMemoryWin32HandleNV = 0;
# else
PFN_dummy vkGetMemoryWin32HandleNV_placeholder = 0;
# endif /*VK_USE_PLATFORM_WIN32_KHR*/
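// Editorial note: when a platform guard such as VK_USE_PLATFORM_WIN32_KHR is not defined, the #else branch
// declares a PFN_dummy placeholder instead, presumably so that the dispatcher keeps the same member layout
// and size regardless of which platform macros are set; the same pattern repeats for the other guarded
// blocks (beta extensions, Android, Fuchsia, Metal, QNX) further down.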
//=== VK_KHR_device_group ===
PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR vkGetDeviceGroupPeerMemoryFeaturesKHR = 0;
PFN_vkCmdSetDeviceMaskKHR vkCmdSetDeviceMaskKHR = 0;
PFN_vkCmdDispatchBaseKHR vkCmdDispatchBaseKHR = 0;
//=== VK_KHR_maintenance1 ===
PFN_vkTrimCommandPoolKHR vkTrimCommandPoolKHR = 0;
# if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_KHR_external_memory_win32 ===
PFN_vkGetMemoryWin32HandleKHR vkGetMemoryWin32HandleKHR = 0;
PFN_vkGetMemoryWin32HandlePropertiesKHR vkGetMemoryWin32HandlePropertiesKHR = 0;
# else
PFN_dummy vkGetMemoryWin32HandleKHR_placeholder = 0;
PFN_dummy vkGetMemoryWin32HandlePropertiesKHR_placeholder = 0;
# endif /*VK_USE_PLATFORM_WIN32_KHR*/
//=== VK_KHR_external_memory_fd ===
PFN_vkGetMemoryFdKHR vkGetMemoryFdKHR = 0;
PFN_vkGetMemoryFdPropertiesKHR vkGetMemoryFdPropertiesKHR = 0;
# if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_KHR_external_semaphore_win32 ===
PFN_vkImportSemaphoreWin32HandleKHR vkImportSemaphoreWin32HandleKHR = 0;
PFN_vkGetSemaphoreWin32HandleKHR vkGetSemaphoreWin32HandleKHR = 0;
# else
PFN_dummy vkImportSemaphoreWin32HandleKHR_placeholder = 0;
PFN_dummy vkGetSemaphoreWin32HandleKHR_placeholder = 0;
# endif /*VK_USE_PLATFORM_WIN32_KHR*/
//=== VK_KHR_external_semaphore_fd ===
PFN_vkImportSemaphoreFdKHR vkImportSemaphoreFdKHR = 0;
PFN_vkGetSemaphoreFdKHR vkGetSemaphoreFdKHR = 0;
//=== VK_KHR_push_descriptor ===
PFN_vkCmdPushDescriptorSetKHR vkCmdPushDescriptorSetKHR = 0;
PFN_vkCmdPushDescriptorSetWithTemplateKHR vkCmdPushDescriptorSetWithTemplateKHR = 0;
//=== VK_EXT_conditional_rendering ===
PFN_vkCmdBeginConditionalRenderingEXT vkCmdBeginConditionalRenderingEXT = 0;
PFN_vkCmdEndConditionalRenderingEXT vkCmdEndConditionalRenderingEXT = 0;
//=== VK_KHR_descriptor_update_template ===
PFN_vkCreateDescriptorUpdateTemplateKHR vkCreateDescriptorUpdateTemplateKHR = 0;
PFN_vkDestroyDescriptorUpdateTemplateKHR vkDestroyDescriptorUpdateTemplateKHR = 0;
PFN_vkUpdateDescriptorSetWithTemplateKHR vkUpdateDescriptorSetWithTemplateKHR = 0;
//=== VK_NV_clip_space_w_scaling ===
PFN_vkCmdSetViewportWScalingNV vkCmdSetViewportWScalingNV = 0;
//=== VK_EXT_display_control ===
PFN_vkDisplayPowerControlEXT vkDisplayPowerControlEXT = 0;
PFN_vkRegisterDeviceEventEXT vkRegisterDeviceEventEXT = 0;
PFN_vkRegisterDisplayEventEXT vkRegisterDisplayEventEXT = 0;
PFN_vkGetSwapchainCounterEXT vkGetSwapchainCounterEXT = 0;
//=== VK_GOOGLE_display_timing ===
PFN_vkGetRefreshCycleDurationGOOGLE vkGetRefreshCycleDurationGOOGLE = 0;
PFN_vkGetPastPresentationTimingGOOGLE vkGetPastPresentationTimingGOOGLE = 0;
//=== VK_EXT_discard_rectangles ===
PFN_vkCmdSetDiscardRectangleEXT vkCmdSetDiscardRectangleEXT = 0;
PFN_vkCmdSetDiscardRectangleEnableEXT vkCmdSetDiscardRectangleEnableEXT = 0;
PFN_vkCmdSetDiscardRectangleModeEXT vkCmdSetDiscardRectangleModeEXT = 0;
//=== VK_EXT_hdr_metadata ===
PFN_vkSetHdrMetadataEXT vkSetHdrMetadataEXT = 0;
//=== VK_KHR_create_renderpass2 ===
PFN_vkCreateRenderPass2KHR vkCreateRenderPass2KHR = 0;
PFN_vkCmdBeginRenderPass2KHR vkCmdBeginRenderPass2KHR = 0;
PFN_vkCmdNextSubpass2KHR vkCmdNextSubpass2KHR = 0;
PFN_vkCmdEndRenderPass2KHR vkCmdEndRenderPass2KHR = 0;
//=== VK_KHR_shared_presentable_image ===
PFN_vkGetSwapchainStatusKHR vkGetSwapchainStatusKHR = 0;
# if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_KHR_external_fence_win32 ===
PFN_vkImportFenceWin32HandleKHR vkImportFenceWin32HandleKHR = 0;
PFN_vkGetFenceWin32HandleKHR vkGetFenceWin32HandleKHR = 0;
# else
PFN_dummy vkImportFenceWin32HandleKHR_placeholder = 0;
PFN_dummy vkGetFenceWin32HandleKHR_placeholder = 0;
# endif /*VK_USE_PLATFORM_WIN32_KHR*/
//=== VK_KHR_external_fence_fd ===
PFN_vkImportFenceFdKHR vkImportFenceFdKHR = 0;
PFN_vkGetFenceFdKHR vkGetFenceFdKHR = 0;
//=== VK_KHR_performance_query ===
PFN_vkAcquireProfilingLockKHR vkAcquireProfilingLockKHR = 0;
PFN_vkReleaseProfilingLockKHR vkReleaseProfilingLockKHR = 0;
//=== VK_EXT_debug_utils ===
PFN_vkSetDebugUtilsObjectNameEXT vkSetDebugUtilsObjectNameEXT = 0;
PFN_vkSetDebugUtilsObjectTagEXT vkSetDebugUtilsObjectTagEXT = 0;
PFN_vkQueueBeginDebugUtilsLabelEXT vkQueueBeginDebugUtilsLabelEXT = 0;
PFN_vkQueueEndDebugUtilsLabelEXT vkQueueEndDebugUtilsLabelEXT = 0;
PFN_vkQueueInsertDebugUtilsLabelEXT vkQueueInsertDebugUtilsLabelEXT = 0;
PFN_vkCmdBeginDebugUtilsLabelEXT vkCmdBeginDebugUtilsLabelEXT = 0;
PFN_vkCmdEndDebugUtilsLabelEXT vkCmdEndDebugUtilsLabelEXT = 0;
PFN_vkCmdInsertDebugUtilsLabelEXT vkCmdInsertDebugUtilsLabelEXT = 0;
# if defined( VK_USE_PLATFORM_ANDROID_KHR )
//=== VK_ANDROID_external_memory_android_hardware_buffer ===
PFN_vkGetAndroidHardwareBufferPropertiesANDROID vkGetAndroidHardwareBufferPropertiesANDROID = 0;
PFN_vkGetMemoryAndroidHardwareBufferANDROID vkGetMemoryAndroidHardwareBufferANDROID = 0;
# else
PFN_dummy vkGetAndroidHardwareBufferPropertiesANDROID_placeholder = 0;
PFN_dummy vkGetMemoryAndroidHardwareBufferANDROID_placeholder = 0;
# endif /*VK_USE_PLATFORM_ANDROID_KHR*/
# if defined( VK_ENABLE_BETA_EXTENSIONS )
//=== VK_AMDX_shader_enqueue ===
PFN_vkCreateExecutionGraphPipelinesAMDX vkCreateExecutionGraphPipelinesAMDX = 0;
PFN_vkGetExecutionGraphPipelineScratchSizeAMDX vkGetExecutionGraphPipelineScratchSizeAMDX = 0;
PFN_vkGetExecutionGraphPipelineNodeIndexAMDX vkGetExecutionGraphPipelineNodeIndexAMDX = 0;
PFN_vkCmdInitializeGraphScratchMemoryAMDX vkCmdInitializeGraphScratchMemoryAMDX = 0;
PFN_vkCmdDispatchGraphAMDX vkCmdDispatchGraphAMDX = 0;
PFN_vkCmdDispatchGraphIndirectAMDX vkCmdDispatchGraphIndirectAMDX = 0;
PFN_vkCmdDispatchGraphIndirectCountAMDX vkCmdDispatchGraphIndirectCountAMDX = 0;
# else
PFN_dummy vkCreateExecutionGraphPipelinesAMDX_placeholder = 0;
PFN_dummy vkGetExecutionGraphPipelineScratchSizeAMDX_placeholder = 0;
PFN_dummy vkGetExecutionGraphPipelineNodeIndexAMDX_placeholder = 0;
PFN_dummy vkCmdInitializeGraphScratchMemoryAMDX_placeholder = 0;
PFN_dummy vkCmdDispatchGraphAMDX_placeholder = 0;
PFN_dummy vkCmdDispatchGraphIndirectAMDX_placeholder = 0;
PFN_dummy vkCmdDispatchGraphIndirectCountAMDX_placeholder = 0;
# endif /*VK_ENABLE_BETA_EXTENSIONS*/
//=== VK_EXT_sample_locations ===
PFN_vkCmdSetSampleLocationsEXT vkCmdSetSampleLocationsEXT = 0;
//=== VK_KHR_get_memory_requirements2 ===
PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR = 0;
PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR = 0;
PFN_vkGetImageSparseMemoryRequirements2KHR vkGetImageSparseMemoryRequirements2KHR = 0;
//=== VK_KHR_acceleration_structure ===
PFN_vkCreateAccelerationStructureKHR vkCreateAccelerationStructureKHR = 0;
PFN_vkDestroyAccelerationStructureKHR vkDestroyAccelerationStructureKHR = 0;
PFN_vkCmdBuildAccelerationStructuresKHR vkCmdBuildAccelerationStructuresKHR = 0;
PFN_vkCmdBuildAccelerationStructuresIndirectKHR vkCmdBuildAccelerationStructuresIndirectKHR = 0;
PFN_vkBuildAccelerationStructuresKHR vkBuildAccelerationStructuresKHR = 0;
PFN_vkCopyAccelerationStructureKHR vkCopyAccelerationStructureKHR = 0;
PFN_vkCopyAccelerationStructureToMemoryKHR vkCopyAccelerationStructureToMemoryKHR = 0;
PFN_vkCopyMemoryToAccelerationStructureKHR vkCopyMemoryToAccelerationStructureKHR = 0;
PFN_vkWriteAccelerationStructuresPropertiesKHR vkWriteAccelerationStructuresPropertiesKHR = 0;
PFN_vkCmdCopyAccelerationStructureKHR vkCmdCopyAccelerationStructureKHR = 0;
PFN_vkCmdCopyAccelerationStructureToMemoryKHR vkCmdCopyAccelerationStructureToMemoryKHR = 0;
PFN_vkCmdCopyMemoryToAccelerationStructureKHR vkCmdCopyMemoryToAccelerationStructureKHR = 0;
PFN_vkGetAccelerationStructureDeviceAddressKHR vkGetAccelerationStructureDeviceAddressKHR = 0;
PFN_vkCmdWriteAccelerationStructuresPropertiesKHR vkCmdWriteAccelerationStructuresPropertiesKHR = 0;
PFN_vkGetDeviceAccelerationStructureCompatibilityKHR vkGetDeviceAccelerationStructureCompatibilityKHR = 0;
PFN_vkGetAccelerationStructureBuildSizesKHR vkGetAccelerationStructureBuildSizesKHR = 0;
//=== VK_KHR_ray_tracing_pipeline ===
PFN_vkCmdTraceRaysKHR vkCmdTraceRaysKHR = 0;
PFN_vkCreateRayTracingPipelinesKHR vkCreateRayTracingPipelinesKHR = 0;
PFN_vkGetRayTracingShaderGroupHandlesKHR vkGetRayTracingShaderGroupHandlesKHR = 0;
PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = 0;
PFN_vkCmdTraceRaysIndirectKHR vkCmdTraceRaysIndirectKHR = 0;
PFN_vkGetRayTracingShaderGroupStackSizeKHR vkGetRayTracingShaderGroupStackSizeKHR = 0;
PFN_vkCmdSetRayTracingPipelineStackSizeKHR vkCmdSetRayTracingPipelineStackSizeKHR = 0;
//=== VK_KHR_sampler_ycbcr_conversion ===
PFN_vkCreateSamplerYcbcrConversionKHR vkCreateSamplerYcbcrConversionKHR = 0;
PFN_vkDestroySamplerYcbcrConversionKHR vkDestroySamplerYcbcrConversionKHR = 0;
//=== VK_KHR_bind_memory2 ===
PFN_vkBindBufferMemory2KHR vkBindBufferMemory2KHR = 0;
PFN_vkBindImageMemory2KHR vkBindImageMemory2KHR = 0;
//=== VK_EXT_image_drm_format_modifier ===
PFN_vkGetImageDrmFormatModifierPropertiesEXT vkGetImageDrmFormatModifierPropertiesEXT = 0;
//=== VK_EXT_validation_cache ===
PFN_vkCreateValidationCacheEXT vkCreateValidationCacheEXT = 0;
PFN_vkDestroyValidationCacheEXT vkDestroyValidationCacheEXT = 0;
PFN_vkMergeValidationCachesEXT vkMergeValidationCachesEXT = 0;
PFN_vkGetValidationCacheDataEXT vkGetValidationCacheDataEXT = 0;
//=== VK_NV_shading_rate_image ===
PFN_vkCmdBindShadingRateImageNV vkCmdBindShadingRateImageNV = 0;
PFN_vkCmdSetViewportShadingRatePaletteNV vkCmdSetViewportShadingRatePaletteNV = 0;
PFN_vkCmdSetCoarseSampleOrderNV vkCmdSetCoarseSampleOrderNV = 0;
//=== VK_NV_ray_tracing ===
PFN_vkCreateAccelerationStructureNV vkCreateAccelerationStructureNV = 0;
PFN_vkDestroyAccelerationStructureNV vkDestroyAccelerationStructureNV = 0;
PFN_vkGetAccelerationStructureMemoryRequirementsNV vkGetAccelerationStructureMemoryRequirementsNV = 0;
PFN_vkBindAccelerationStructureMemoryNV vkBindAccelerationStructureMemoryNV = 0;
PFN_vkCmdBuildAccelerationStructureNV vkCmdBuildAccelerationStructureNV = 0;
PFN_vkCmdCopyAccelerationStructureNV vkCmdCopyAccelerationStructureNV = 0;
PFN_vkCmdTraceRaysNV vkCmdTraceRaysNV = 0;
PFN_vkCreateRayTracingPipelinesNV vkCreateRayTracingPipelinesNV = 0;
PFN_vkGetRayTracingShaderGroupHandlesNV vkGetRayTracingShaderGroupHandlesNV = 0;
PFN_vkGetAccelerationStructureHandleNV vkGetAccelerationStructureHandleNV = 0;
PFN_vkCmdWriteAccelerationStructuresPropertiesNV vkCmdWriteAccelerationStructuresPropertiesNV = 0;
PFN_vkCompileDeferredNV vkCompileDeferredNV = 0;
//=== VK_KHR_maintenance3 ===
PFN_vkGetDescriptorSetLayoutSupportKHR vkGetDescriptorSetLayoutSupportKHR = 0;
//=== VK_KHR_draw_indirect_count ===
PFN_vkCmdDrawIndirectCountKHR vkCmdDrawIndirectCountKHR = 0;
PFN_vkCmdDrawIndexedIndirectCountKHR vkCmdDrawIndexedIndirectCountKHR = 0;
//=== VK_EXT_external_memory_host ===
PFN_vkGetMemoryHostPointerPropertiesEXT vkGetMemoryHostPointerPropertiesEXT = 0;
//=== VK_AMD_buffer_marker ===
PFN_vkCmdWriteBufferMarkerAMD vkCmdWriteBufferMarkerAMD = 0;
PFN_vkCmdWriteBufferMarker2AMD vkCmdWriteBufferMarker2AMD = 0;
//=== VK_EXT_calibrated_timestamps ===
PFN_vkGetCalibratedTimestampsEXT vkGetCalibratedTimestampsEXT = 0;
//=== VK_NV_mesh_shader ===
PFN_vkCmdDrawMeshTasksNV vkCmdDrawMeshTasksNV = 0;
PFN_vkCmdDrawMeshTasksIndirectNV vkCmdDrawMeshTasksIndirectNV = 0;
PFN_vkCmdDrawMeshTasksIndirectCountNV vkCmdDrawMeshTasksIndirectCountNV = 0;
//=== VK_NV_scissor_exclusive ===
PFN_vkCmdSetExclusiveScissorEnableNV vkCmdSetExclusiveScissorEnableNV = 0;
PFN_vkCmdSetExclusiveScissorNV vkCmdSetExclusiveScissorNV = 0;
//=== VK_NV_device_diagnostic_checkpoints ===
PFN_vkCmdSetCheckpointNV vkCmdSetCheckpointNV = 0;
PFN_vkGetQueueCheckpointDataNV vkGetQueueCheckpointDataNV = 0;
PFN_vkGetQueueCheckpointData2NV vkGetQueueCheckpointData2NV = 0;
//=== VK_KHR_timeline_semaphore ===
PFN_vkGetSemaphoreCounterValueKHR vkGetSemaphoreCounterValueKHR = 0;
PFN_vkWaitSemaphoresKHR vkWaitSemaphoresKHR = 0;
PFN_vkSignalSemaphoreKHR vkSignalSemaphoreKHR = 0;
//=== VK_INTEL_performance_query ===
PFN_vkInitializePerformanceApiINTEL vkInitializePerformanceApiINTEL = 0;
PFN_vkUninitializePerformanceApiINTEL vkUninitializePerformanceApiINTEL = 0;
PFN_vkCmdSetPerformanceMarkerINTEL vkCmdSetPerformanceMarkerINTEL = 0;
PFN_vkCmdSetPerformanceStreamMarkerINTEL vkCmdSetPerformanceStreamMarkerINTEL = 0;
PFN_vkCmdSetPerformanceOverrideINTEL vkCmdSetPerformanceOverrideINTEL = 0;
PFN_vkAcquirePerformanceConfigurationINTEL vkAcquirePerformanceConfigurationINTEL = 0;
PFN_vkReleasePerformanceConfigurationINTEL vkReleasePerformanceConfigurationINTEL = 0;
PFN_vkQueueSetPerformanceConfigurationINTEL vkQueueSetPerformanceConfigurationINTEL = 0;
PFN_vkGetPerformanceParameterINTEL vkGetPerformanceParameterINTEL = 0;
//=== VK_AMD_display_native_hdr ===
PFN_vkSetLocalDimmingAMD vkSetLocalDimmingAMD = 0;
//=== VK_KHR_fragment_shading_rate ===
PFN_vkCmdSetFragmentShadingRateKHR vkCmdSetFragmentShadingRateKHR = 0;
//=== VK_KHR_dynamic_rendering_local_read ===
PFN_vkCmdSetRenderingAttachmentLocationsKHR vkCmdSetRenderingAttachmentLocationsKHR = 0;
PFN_vkCmdSetRenderingInputAttachmentIndicesKHR vkCmdSetRenderingInputAttachmentIndicesKHR = 0;
//=== VK_EXT_buffer_device_address ===
PFN_vkGetBufferDeviceAddressEXT vkGetBufferDeviceAddressEXT = 0;
//=== VK_KHR_present_wait ===
PFN_vkWaitForPresentKHR vkWaitForPresentKHR = 0;
# if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_EXT_full_screen_exclusive ===
PFN_vkAcquireFullScreenExclusiveModeEXT vkAcquireFullScreenExclusiveModeEXT = 0;
PFN_vkReleaseFullScreenExclusiveModeEXT vkReleaseFullScreenExclusiveModeEXT = 0;
PFN_vkGetDeviceGroupSurfacePresentModes2EXT vkGetDeviceGroupSurfacePresentModes2EXT = 0;
# else
PFN_dummy vkAcquireFullScreenExclusiveModeEXT_placeholder = 0;
PFN_dummy vkReleaseFullScreenExclusiveModeEXT_placeholder = 0;
PFN_dummy vkGetDeviceGroupSurfacePresentModes2EXT_placeholder = 0;
# endif /*VK_USE_PLATFORM_WIN32_KHR*/
//=== VK_KHR_buffer_device_address ===
PFN_vkGetBufferDeviceAddressKHR vkGetBufferDeviceAddressKHR = 0;
PFN_vkGetBufferOpaqueCaptureAddressKHR vkGetBufferOpaqueCaptureAddressKHR = 0;
PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR vkGetDeviceMemoryOpaqueCaptureAddressKHR = 0;
//=== VK_EXT_line_rasterization ===
PFN_vkCmdSetLineStippleEXT vkCmdSetLineStippleEXT = 0;
//=== VK_EXT_host_query_reset ===
PFN_vkResetQueryPoolEXT vkResetQueryPoolEXT = 0;
//=== VK_EXT_extended_dynamic_state ===
PFN_vkCmdSetCullModeEXT vkCmdSetCullModeEXT = 0;
PFN_vkCmdSetFrontFaceEXT vkCmdSetFrontFaceEXT = 0;
PFN_vkCmdSetPrimitiveTopologyEXT vkCmdSetPrimitiveTopologyEXT = 0;
PFN_vkCmdSetViewportWithCountEXT vkCmdSetViewportWithCountEXT = 0;
PFN_vkCmdSetScissorWithCountEXT vkCmdSetScissorWithCountEXT = 0;
PFN_vkCmdBindVertexBuffers2EXT vkCmdBindVertexBuffers2EXT = 0;
PFN_vkCmdSetDepthTestEnableEXT vkCmdSetDepthTestEnableEXT = 0;
PFN_vkCmdSetDepthWriteEnableEXT vkCmdSetDepthWriteEnableEXT = 0;
PFN_vkCmdSetDepthCompareOpEXT vkCmdSetDepthCompareOpEXT = 0;
PFN_vkCmdSetDepthBoundsTestEnableEXT vkCmdSetDepthBoundsTestEnableEXT = 0;
PFN_vkCmdSetStencilTestEnableEXT vkCmdSetStencilTestEnableEXT = 0;
PFN_vkCmdSetStencilOpEXT vkCmdSetStencilOpEXT = 0;
//=== VK_KHR_deferred_host_operations ===
PFN_vkCreateDeferredOperationKHR vkCreateDeferredOperationKHR = 0;
PFN_vkDestroyDeferredOperationKHR vkDestroyDeferredOperationKHR = 0;
PFN_vkGetDeferredOperationMaxConcurrencyKHR vkGetDeferredOperationMaxConcurrencyKHR = 0;
PFN_vkGetDeferredOperationResultKHR vkGetDeferredOperationResultKHR = 0;
PFN_vkDeferredOperationJoinKHR vkDeferredOperationJoinKHR = 0;
//=== VK_KHR_pipeline_executable_properties ===
PFN_vkGetPipelineExecutablePropertiesKHR vkGetPipelineExecutablePropertiesKHR = 0;
PFN_vkGetPipelineExecutableStatisticsKHR vkGetPipelineExecutableStatisticsKHR = 0;
PFN_vkGetPipelineExecutableInternalRepresentationsKHR vkGetPipelineExecutableInternalRepresentationsKHR = 0;
//=== VK_EXT_host_image_copy ===
PFN_vkCopyMemoryToImageEXT vkCopyMemoryToImageEXT = 0;
PFN_vkCopyImageToMemoryEXT vkCopyImageToMemoryEXT = 0;
PFN_vkCopyImageToImageEXT vkCopyImageToImageEXT = 0;
PFN_vkTransitionImageLayoutEXT vkTransitionImageLayoutEXT = 0;
PFN_vkGetImageSubresourceLayout2EXT vkGetImageSubresourceLayout2EXT = 0;
//=== VK_KHR_map_memory2 ===
PFN_vkMapMemory2KHR vkMapMemory2KHR = 0;
PFN_vkUnmapMemory2KHR vkUnmapMemory2KHR = 0;
//=== VK_EXT_swapchain_maintenance1 ===
PFN_vkReleaseSwapchainImagesEXT vkReleaseSwapchainImagesEXT = 0;
//=== VK_NV_device_generated_commands ===
PFN_vkGetGeneratedCommandsMemoryRequirementsNV vkGetGeneratedCommandsMemoryRequirementsNV = 0;
PFN_vkCmdPreprocessGeneratedCommandsNV vkCmdPreprocessGeneratedCommandsNV = 0;
PFN_vkCmdExecuteGeneratedCommandsNV vkCmdExecuteGeneratedCommandsNV = 0;
PFN_vkCmdBindPipelineShaderGroupNV vkCmdBindPipelineShaderGroupNV = 0;
PFN_vkCreateIndirectCommandsLayoutNV vkCreateIndirectCommandsLayoutNV = 0;
PFN_vkDestroyIndirectCommandsLayoutNV vkDestroyIndirectCommandsLayoutNV = 0;
//=== VK_EXT_depth_bias_control ===
PFN_vkCmdSetDepthBias2EXT vkCmdSetDepthBias2EXT = 0;
//=== VK_EXT_private_data ===
PFN_vkCreatePrivateDataSlotEXT vkCreatePrivateDataSlotEXT = 0;
PFN_vkDestroyPrivateDataSlotEXT vkDestroyPrivateDataSlotEXT = 0;
PFN_vkSetPrivateDataEXT vkSetPrivateDataEXT = 0;
PFN_vkGetPrivateDataEXT vkGetPrivateDataEXT = 0;
//=== VK_KHR_video_encode_queue ===
PFN_vkGetEncodedVideoSessionParametersKHR vkGetEncodedVideoSessionParametersKHR = 0;
PFN_vkCmdEncodeVideoKHR vkCmdEncodeVideoKHR = 0;
# if defined( VK_ENABLE_BETA_EXTENSIONS )
//=== VK_NV_cuda_kernel_launch ===
PFN_vkCreateCudaModuleNV vkCreateCudaModuleNV = 0;
PFN_vkGetCudaModuleCacheNV vkGetCudaModuleCacheNV = 0;
PFN_vkCreateCudaFunctionNV vkCreateCudaFunctionNV = 0;
PFN_vkDestroyCudaModuleNV vkDestroyCudaModuleNV = 0;
PFN_vkDestroyCudaFunctionNV vkDestroyCudaFunctionNV = 0;
PFN_vkCmdCudaLaunchKernelNV vkCmdCudaLaunchKernelNV = 0;
# else
PFN_dummy vkCreateCudaModuleNV_placeholder = 0;
PFN_dummy vkGetCudaModuleCacheNV_placeholder = 0;
PFN_dummy vkCreateCudaFunctionNV_placeholder = 0;
PFN_dummy vkDestroyCudaModuleNV_placeholder = 0;
PFN_dummy vkDestroyCudaFunctionNV_placeholder = 0;
PFN_dummy vkCmdCudaLaunchKernelNV_placeholder = 0;
# endif /*VK_ENABLE_BETA_EXTENSIONS*/
# if defined( VK_USE_PLATFORM_METAL_EXT )
//=== VK_EXT_metal_objects ===
PFN_vkExportMetalObjectsEXT vkExportMetalObjectsEXT = 0;
# else
PFN_dummy vkExportMetalObjectsEXT_placeholder = 0;
# endif /*VK_USE_PLATFORM_METAL_EXT*/
//=== VK_KHR_synchronization2 ===
PFN_vkCmdSetEvent2KHR vkCmdSetEvent2KHR = 0;
PFN_vkCmdResetEvent2KHR vkCmdResetEvent2KHR = 0;
PFN_vkCmdWaitEvents2KHR vkCmdWaitEvents2KHR = 0;
PFN_vkCmdPipelineBarrier2KHR vkCmdPipelineBarrier2KHR = 0;
PFN_vkCmdWriteTimestamp2KHR vkCmdWriteTimestamp2KHR = 0;
PFN_vkQueueSubmit2KHR vkQueueSubmit2KHR = 0;
//=== VK_EXT_descriptor_buffer ===
PFN_vkGetDescriptorSetLayoutSizeEXT vkGetDescriptorSetLayoutSizeEXT = 0;
PFN_vkGetDescriptorSetLayoutBindingOffsetEXT vkGetDescriptorSetLayoutBindingOffsetEXT = 0;
PFN_vkGetDescriptorEXT vkGetDescriptorEXT = 0;
PFN_vkCmdBindDescriptorBuffersEXT vkCmdBindDescriptorBuffersEXT = 0;
PFN_vkCmdSetDescriptorBufferOffsetsEXT vkCmdSetDescriptorBufferOffsetsEXT = 0;
PFN_vkCmdBindDescriptorBufferEmbeddedSamplersEXT vkCmdBindDescriptorBufferEmbeddedSamplersEXT = 0;
PFN_vkGetBufferOpaqueCaptureDescriptorDataEXT vkGetBufferOpaqueCaptureDescriptorDataEXT = 0;
PFN_vkGetImageOpaqueCaptureDescriptorDataEXT vkGetImageOpaqueCaptureDescriptorDataEXT = 0;
PFN_vkGetImageViewOpaqueCaptureDescriptorDataEXT vkGetImageViewOpaqueCaptureDescriptorDataEXT = 0;
PFN_vkGetSamplerOpaqueCaptureDescriptorDataEXT vkGetSamplerOpaqueCaptureDescriptorDataEXT = 0;
PFN_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT = 0;
//=== VK_NV_fragment_shading_rate_enums ===
PFN_vkCmdSetFragmentShadingRateEnumNV vkCmdSetFragmentShadingRateEnumNV = 0;
//=== VK_EXT_mesh_shader ===
PFN_vkCmdDrawMeshTasksEXT vkCmdDrawMeshTasksEXT = 0;
PFN_vkCmdDrawMeshTasksIndirectEXT vkCmdDrawMeshTasksIndirectEXT = 0;
PFN_vkCmdDrawMeshTasksIndirectCountEXT vkCmdDrawMeshTasksIndirectCountEXT = 0;
//=== VK_KHR_copy_commands2 ===
PFN_vkCmdCopyBuffer2KHR vkCmdCopyBuffer2KHR = 0;
PFN_vkCmdCopyImage2KHR vkCmdCopyImage2KHR = 0;
PFN_vkCmdCopyBufferToImage2KHR vkCmdCopyBufferToImage2KHR = 0;
PFN_vkCmdCopyImageToBuffer2KHR vkCmdCopyImageToBuffer2KHR = 0;
PFN_vkCmdBlitImage2KHR vkCmdBlitImage2KHR = 0;
PFN_vkCmdResolveImage2KHR vkCmdResolveImage2KHR = 0;
//=== VK_EXT_device_fault ===
PFN_vkGetDeviceFaultInfoEXT vkGetDeviceFaultInfoEXT = 0;
//=== VK_EXT_vertex_input_dynamic_state ===
PFN_vkCmdSetVertexInputEXT vkCmdSetVertexInputEXT = 0;
# if defined( VK_USE_PLATFORM_FUCHSIA )
//=== VK_FUCHSIA_external_memory ===
PFN_vkGetMemoryZirconHandleFUCHSIA vkGetMemoryZirconHandleFUCHSIA = 0;
PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA vkGetMemoryZirconHandlePropertiesFUCHSIA = 0;
# else
PFN_dummy vkGetMemoryZirconHandleFUCHSIA_placeholder = 0;
PFN_dummy vkGetMemoryZirconHandlePropertiesFUCHSIA_placeholder = 0;
# endif /*VK_USE_PLATFORM_FUCHSIA*/
# if defined( VK_USE_PLATFORM_FUCHSIA )
//=== VK_FUCHSIA_external_semaphore ===
PFN_vkImportSemaphoreZirconHandleFUCHSIA vkImportSemaphoreZirconHandleFUCHSIA = 0;
PFN_vkGetSemaphoreZirconHandleFUCHSIA vkGetSemaphoreZirconHandleFUCHSIA = 0;
# else
PFN_dummy vkImportSemaphoreZirconHandleFUCHSIA_placeholder = 0;
PFN_dummy vkGetSemaphoreZirconHandleFUCHSIA_placeholder = 0;
# endif /*VK_USE_PLATFORM_FUCHSIA*/
# if defined( VK_USE_PLATFORM_FUCHSIA )
//=== VK_FUCHSIA_buffer_collection ===
PFN_vkCreateBufferCollectionFUCHSIA vkCreateBufferCollectionFUCHSIA = 0;
PFN_vkSetBufferCollectionImageConstraintsFUCHSIA vkSetBufferCollectionImageConstraintsFUCHSIA = 0;
PFN_vkSetBufferCollectionBufferConstraintsFUCHSIA vkSetBufferCollectionBufferConstraintsFUCHSIA = 0;
PFN_vkDestroyBufferCollectionFUCHSIA vkDestroyBufferCollectionFUCHSIA = 0;
PFN_vkGetBufferCollectionPropertiesFUCHSIA vkGetBufferCollectionPropertiesFUCHSIA = 0;
# else
PFN_dummy vkCreateBufferCollectionFUCHSIA_placeholder = 0;
PFN_dummy vkSetBufferCollectionImageConstraintsFUCHSIA_placeholder = 0;
PFN_dummy vkSetBufferCollectionBufferConstraintsFUCHSIA_placeholder = 0;
PFN_dummy vkDestroyBufferCollectionFUCHSIA_placeholder = 0;
PFN_dummy vkGetBufferCollectionPropertiesFUCHSIA_placeholder = 0;
# endif /*VK_USE_PLATFORM_FUCHSIA*/
//=== VK_HUAWEI_subpass_shading ===
PFN_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI = 0;
PFN_vkCmdSubpassShadingHUAWEI vkCmdSubpassShadingHUAWEI = 0;
//=== VK_HUAWEI_invocation_mask ===
PFN_vkCmdBindInvocationMaskHUAWEI vkCmdBindInvocationMaskHUAWEI = 0;
//=== VK_NV_external_memory_rdma ===
PFN_vkGetMemoryRemoteAddressNV vkGetMemoryRemoteAddressNV = 0;
//=== VK_EXT_pipeline_properties ===
PFN_vkGetPipelinePropertiesEXT vkGetPipelinePropertiesEXT = 0;
//=== VK_EXT_extended_dynamic_state2 ===
PFN_vkCmdSetPatchControlPointsEXT vkCmdSetPatchControlPointsEXT = 0;
PFN_vkCmdSetRasterizerDiscardEnableEXT vkCmdSetRasterizerDiscardEnableEXT = 0;
PFN_vkCmdSetDepthBiasEnableEXT vkCmdSetDepthBiasEnableEXT = 0;
PFN_vkCmdSetLogicOpEXT vkCmdSetLogicOpEXT = 0;
PFN_vkCmdSetPrimitiveRestartEnableEXT vkCmdSetPrimitiveRestartEnableEXT = 0;
//=== VK_EXT_color_write_enable ===
PFN_vkCmdSetColorWriteEnableEXT vkCmdSetColorWriteEnableEXT = 0;
//=== VK_KHR_ray_tracing_maintenance1 ===
PFN_vkCmdTraceRaysIndirect2KHR vkCmdTraceRaysIndirect2KHR = 0;
//=== VK_EXT_multi_draw ===
PFN_vkCmdDrawMultiEXT vkCmdDrawMultiEXT = 0;
PFN_vkCmdDrawMultiIndexedEXT vkCmdDrawMultiIndexedEXT = 0;
//=== VK_EXT_opacity_micromap ===
PFN_vkCreateMicromapEXT vkCreateMicromapEXT = 0;
PFN_vkDestroyMicromapEXT vkDestroyMicromapEXT = 0;
PFN_vkCmdBuildMicromapsEXT vkCmdBuildMicromapsEXT = 0;
PFN_vkBuildMicromapsEXT vkBuildMicromapsEXT = 0;
PFN_vkCopyMicromapEXT vkCopyMicromapEXT = 0;
PFN_vkCopyMicromapToMemoryEXT vkCopyMicromapToMemoryEXT = 0;
PFN_vkCopyMemoryToMicromapEXT vkCopyMemoryToMicromapEXT = 0;
PFN_vkWriteMicromapsPropertiesEXT vkWriteMicromapsPropertiesEXT = 0;
PFN_vkCmdCopyMicromapEXT vkCmdCopyMicromapEXT = 0;
PFN_vkCmdCopyMicromapToMemoryEXT vkCmdCopyMicromapToMemoryEXT = 0;
PFN_vkCmdCopyMemoryToMicromapEXT vkCmdCopyMemoryToMicromapEXT = 0;
PFN_vkCmdWriteMicromapsPropertiesEXT vkCmdWriteMicromapsPropertiesEXT = 0;
PFN_vkGetDeviceMicromapCompatibilityEXT vkGetDeviceMicromapCompatibilityEXT = 0;
PFN_vkGetMicromapBuildSizesEXT vkGetMicromapBuildSizesEXT = 0;
//=== VK_HUAWEI_cluster_culling_shader ===
PFN_vkCmdDrawClusterHUAWEI vkCmdDrawClusterHUAWEI = 0;
PFN_vkCmdDrawClusterIndirectHUAWEI vkCmdDrawClusterIndirectHUAWEI = 0;
//=== VK_EXT_pageable_device_local_memory ===
PFN_vkSetDeviceMemoryPriorityEXT vkSetDeviceMemoryPriorityEXT = 0;
//=== VK_KHR_maintenance4 ===
PFN_vkGetDeviceBufferMemoryRequirementsKHR vkGetDeviceBufferMemoryRequirementsKHR = 0;
PFN_vkGetDeviceImageMemoryRequirementsKHR vkGetDeviceImageMemoryRequirementsKHR = 0;
PFN_vkGetDeviceImageSparseMemoryRequirementsKHR vkGetDeviceImageSparseMemoryRequirementsKHR = 0;
//=== VK_VALVE_descriptor_set_host_mapping ===
PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE vkGetDescriptorSetLayoutHostMappingInfoVALVE = 0;
PFN_vkGetDescriptorSetHostMappingVALVE vkGetDescriptorSetHostMappingVALVE = 0;
//=== VK_NV_copy_memory_indirect ===
PFN_vkCmdCopyMemoryIndirectNV vkCmdCopyMemoryIndirectNV = 0;
PFN_vkCmdCopyMemoryToImageIndirectNV vkCmdCopyMemoryToImageIndirectNV = 0;
//=== VK_NV_memory_decompression ===
PFN_vkCmdDecompressMemoryNV vkCmdDecompressMemoryNV = 0;
PFN_vkCmdDecompressMemoryIndirectCountNV vkCmdDecompressMemoryIndirectCountNV = 0;
//=== VK_NV_device_generated_commands_compute ===
PFN_vkGetPipelineIndirectMemoryRequirementsNV vkGetPipelineIndirectMemoryRequirementsNV = 0;
PFN_vkCmdUpdatePipelineIndirectBufferNV vkCmdUpdatePipelineIndirectBufferNV = 0;
PFN_vkGetPipelineIndirectDeviceAddressNV vkGetPipelineIndirectDeviceAddressNV = 0;
//=== VK_EXT_extended_dynamic_state3 ===
PFN_vkCmdSetDepthClampEnableEXT vkCmdSetDepthClampEnableEXT = 0;
PFN_vkCmdSetPolygonModeEXT vkCmdSetPolygonModeEXT = 0;
PFN_vkCmdSetRasterizationSamplesEXT vkCmdSetRasterizationSamplesEXT = 0;
PFN_vkCmdSetSampleMaskEXT vkCmdSetSampleMaskEXT = 0;
PFN_vkCmdSetAlphaToCoverageEnableEXT vkCmdSetAlphaToCoverageEnableEXT = 0;
PFN_vkCmdSetAlphaToOneEnableEXT vkCmdSetAlphaToOneEnableEXT = 0;
PFN_vkCmdSetLogicOpEnableEXT vkCmdSetLogicOpEnableEXT = 0;
PFN_vkCmdSetColorBlendEnableEXT vkCmdSetColorBlendEnableEXT = 0;
PFN_vkCmdSetColorBlendEquationEXT vkCmdSetColorBlendEquationEXT = 0;
PFN_vkCmdSetColorWriteMaskEXT vkCmdSetColorWriteMaskEXT = 0;
PFN_vkCmdSetTessellationDomainOriginEXT vkCmdSetTessellationDomainOriginEXT = 0;
PFN_vkCmdSetRasterizationStreamEXT vkCmdSetRasterizationStreamEXT = 0;
PFN_vkCmdSetConservativeRasterizationModeEXT vkCmdSetConservativeRasterizationModeEXT = 0;
PFN_vkCmdSetExtraPrimitiveOverestimationSizeEXT vkCmdSetExtraPrimitiveOverestimationSizeEXT = 0;
PFN_vkCmdSetDepthClipEnableEXT vkCmdSetDepthClipEnableEXT = 0;
PFN_vkCmdSetSampleLocationsEnableEXT vkCmdSetSampleLocationsEnableEXT = 0;
PFN_vkCmdSetColorBlendAdvancedEXT vkCmdSetColorBlendAdvancedEXT = 0;
PFN_vkCmdSetProvokingVertexModeEXT vkCmdSetProvokingVertexModeEXT = 0;
PFN_vkCmdSetLineRasterizationModeEXT vkCmdSetLineRasterizationModeEXT = 0;
PFN_vkCmdSetLineStippleEnableEXT vkCmdSetLineStippleEnableEXT = 0;
PFN_vkCmdSetDepthClipNegativeOneToOneEXT vkCmdSetDepthClipNegativeOneToOneEXT = 0;
PFN_vkCmdSetViewportWScalingEnableNV vkCmdSetViewportWScalingEnableNV = 0;
PFN_vkCmdSetViewportSwizzleNV vkCmdSetViewportSwizzleNV = 0;
PFN_vkCmdSetCoverageToColorEnableNV vkCmdSetCoverageToColorEnableNV = 0;
PFN_vkCmdSetCoverageToColorLocationNV vkCmdSetCoverageToColorLocationNV = 0;
PFN_vkCmdSetCoverageModulationModeNV vkCmdSetCoverageModulationModeNV = 0;
PFN_vkCmdSetCoverageModulationTableEnableNV vkCmdSetCoverageModulationTableEnableNV = 0;
PFN_vkCmdSetCoverageModulationTableNV vkCmdSetCoverageModulationTableNV = 0;
PFN_vkCmdSetShadingRateImageEnableNV vkCmdSetShadingRateImageEnableNV = 0;
PFN_vkCmdSetRepresentativeFragmentTestEnableNV vkCmdSetRepresentativeFragmentTestEnableNV = 0;
PFN_vkCmdSetCoverageReductionModeNV vkCmdSetCoverageReductionModeNV = 0;
//=== VK_EXT_shader_module_identifier ===
PFN_vkGetShaderModuleIdentifierEXT vkGetShaderModuleIdentifierEXT = 0;
PFN_vkGetShaderModuleCreateInfoIdentifierEXT vkGetShaderModuleCreateInfoIdentifierEXT = 0;
//=== VK_NV_optical_flow ===
PFN_vkCreateOpticalFlowSessionNV vkCreateOpticalFlowSessionNV = 0;
PFN_vkDestroyOpticalFlowSessionNV vkDestroyOpticalFlowSessionNV = 0;
PFN_vkBindOpticalFlowSessionImageNV vkBindOpticalFlowSessionImageNV = 0;
PFN_vkCmdOpticalFlowExecuteNV vkCmdOpticalFlowExecuteNV = 0;
//=== VK_KHR_maintenance5 ===
PFN_vkCmdBindIndexBuffer2KHR vkCmdBindIndexBuffer2KHR = 0;
PFN_vkGetRenderingAreaGranularityKHR vkGetRenderingAreaGranularityKHR = 0;
PFN_vkGetDeviceImageSubresourceLayoutKHR vkGetDeviceImageSubresourceLayoutKHR = 0;
PFN_vkGetImageSubresourceLayout2KHR vkGetImageSubresourceLayout2KHR = 0;
//=== VK_AMD_anti_lag ===
PFN_vkAntiLagUpdateAMD vkAntiLagUpdateAMD = 0;
//=== VK_EXT_shader_object ===
PFN_vkCreateShadersEXT vkCreateShadersEXT = 0;
PFN_vkDestroyShaderEXT vkDestroyShaderEXT = 0;
PFN_vkGetShaderBinaryDataEXT vkGetShaderBinaryDataEXT = 0;
PFN_vkCmdBindShadersEXT vkCmdBindShadersEXT = 0;
PFN_vkCmdSetDepthClampRangeEXT vkCmdSetDepthClampRangeEXT = 0;
//=== VK_KHR_pipeline_binary ===
PFN_vkCreatePipelineBinariesKHR vkCreatePipelineBinariesKHR = 0;
PFN_vkDestroyPipelineBinaryKHR vkDestroyPipelineBinaryKHR = 0;
PFN_vkGetPipelineKeyKHR vkGetPipelineKeyKHR = 0;
PFN_vkGetPipelineBinaryDataKHR vkGetPipelineBinaryDataKHR = 0;
PFN_vkReleaseCapturedPipelineDataKHR vkReleaseCapturedPipelineDataKHR = 0;
//=== VK_QCOM_tile_properties ===
PFN_vkGetFramebufferTilePropertiesQCOM vkGetFramebufferTilePropertiesQCOM = 0;
PFN_vkGetDynamicRenderingTilePropertiesQCOM vkGetDynamicRenderingTilePropertiesQCOM = 0;
//=== VK_NV_low_latency2 ===
PFN_vkSetLatencySleepModeNV vkSetLatencySleepModeNV = 0;
PFN_vkLatencySleepNV vkLatencySleepNV = 0;
PFN_vkSetLatencyMarkerNV vkSetLatencyMarkerNV = 0;
PFN_vkGetLatencyTimingsNV vkGetLatencyTimingsNV = 0;
PFN_vkQueueNotifyOutOfBandNV vkQueueNotifyOutOfBandNV = 0;
//=== VK_EXT_attachment_feedback_loop_dynamic_state ===
PFN_vkCmdSetAttachmentFeedbackLoopEnableEXT vkCmdSetAttachmentFeedbackLoopEnableEXT = 0;
# if defined( VK_USE_PLATFORM_SCREEN_QNX )
//=== VK_QNX_external_memory_screen_buffer ===
PFN_vkGetScreenBufferPropertiesQNX vkGetScreenBufferPropertiesQNX = 0;
# else
PFN_dummy vkGetScreenBufferPropertiesQNX_placeholder = 0;
# endif /*VK_USE_PLATFORM_SCREEN_QNX*/
//=== VK_KHR_line_rasterization ===
PFN_vkCmdSetLineStippleKHR vkCmdSetLineStippleKHR = 0;
//=== VK_KHR_calibrated_timestamps ===
PFN_vkGetCalibratedTimestampsKHR vkGetCalibratedTimestampsKHR = 0;
//=== VK_KHR_maintenance6 ===
PFN_vkCmdBindDescriptorSets2KHR vkCmdBindDescriptorSets2KHR = 0;
PFN_vkCmdPushConstants2KHR vkCmdPushConstants2KHR = 0;
PFN_vkCmdPushDescriptorSet2KHR vkCmdPushDescriptorSet2KHR = 0;
PFN_vkCmdPushDescriptorSetWithTemplate2KHR vkCmdPushDescriptorSetWithTemplate2KHR = 0;
PFN_vkCmdSetDescriptorBufferOffsets2EXT vkCmdSetDescriptorBufferOffsets2EXT = 0;
PFN_vkCmdBindDescriptorBufferEmbeddedSamplers2EXT vkCmdBindDescriptorBufferEmbeddedSamplers2EXT = 0;
//=== VK_EXT_device_generated_commands ===
PFN_vkGetGeneratedCommandsMemoryRequirementsEXT vkGetGeneratedCommandsMemoryRequirementsEXT = 0;
PFN_vkCmdPreprocessGeneratedCommandsEXT vkCmdPreprocessGeneratedCommandsEXT = 0;
PFN_vkCmdExecuteGeneratedCommandsEXT vkCmdExecuteGeneratedCommandsEXT = 0;
PFN_vkCreateIndirectCommandsLayoutEXT vkCreateIndirectCommandsLayoutEXT = 0;
PFN_vkDestroyIndirectCommandsLayoutEXT vkDestroyIndirectCommandsLayoutEXT = 0;
PFN_vkCreateIndirectExecutionSetEXT vkCreateIndirectExecutionSetEXT = 0;
PFN_vkDestroyIndirectExecutionSetEXT vkDestroyIndirectExecutionSetEXT = 0;
PFN_vkUpdateIndirectExecutionSetPipelineEXT vkUpdateIndirectExecutionSetPipelineEXT = 0;
PFN_vkUpdateIndirectExecutionSetShaderEXT vkUpdateIndirectExecutionSetShaderEXT = 0;
};
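// Note: the dispatcher classes above are plain tables of per-instance / per-device function
// pointers, filled in via vkGetInstanceProcAddr / vkGetDeviceProcAddr when the corresponding
// RAII handle is constructed. Entries for extensions that were not available stay at 0, so a
// loaded pointer doubles as an availability check. A minimal sketch, assuming the default
// vk / vk::raii namespace aliases and an already constructed vk::raii::Device named `device`
// (both names are illustrative, not part of this header):
//
//   if ( device.getDispatcher()->vkSetDeviceMemoryPriorityEXT )
//   {
//     // the VK_EXT_pageable_device_local_memory entry point was resolved and may be called
//   }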
} // namespace detail
//========================================
//=== RAII HANDLE forward declarations ===
//========================================
//=== VK_VERSION_1_0 ===
class Instance;
class PhysicalDevice;
class Device;
class Queue;
class DeviceMemory;
class Fence;
class Semaphore;
class Event;
class QueryPool;
class Buffer;
class BufferView;
class Image;
class ImageView;
class ShaderModule;
class PipelineCache;
class Pipeline;
class PipelineLayout;
class Sampler;
class DescriptorPool;
class DescriptorSet;
class DescriptorSetLayout;
class Framebuffer;
class RenderPass;
class CommandPool;
class CommandBuffer;
//=== VK_VERSION_1_1 ===
class SamplerYcbcrConversion;
class DescriptorUpdateTemplate;
//=== VK_VERSION_1_3 ===
class PrivateDataSlot;
//=== VK_KHR_surface ===
class SurfaceKHR;
//=== VK_KHR_swapchain ===
class SwapchainKHR;
//=== VK_KHR_display ===
class DisplayKHR;
class DisplayModeKHR;
//=== VK_EXT_debug_report ===
class DebugReportCallbackEXT;
//=== VK_KHR_video_queue ===
class VideoSessionKHR;
class VideoSessionParametersKHR;
//=== VK_NVX_binary_import ===
class CuModuleNVX;
class CuFunctionNVX;
//=== VK_EXT_debug_utils ===
class DebugUtilsMessengerEXT;
//=== VK_KHR_acceleration_structure ===
class AccelerationStructureKHR;
//=== VK_EXT_validation_cache ===
class ValidationCacheEXT;
//=== VK_NV_ray_tracing ===
class AccelerationStructureNV;
//=== VK_INTEL_performance_query ===
class PerformanceConfigurationINTEL;
//=== VK_KHR_deferred_host_operations ===
class DeferredOperationKHR;
//=== VK_NV_device_generated_commands ===
class IndirectCommandsLayoutNV;
# if defined( VK_ENABLE_BETA_EXTENSIONS )
//=== VK_NV_cuda_kernel_launch ===
class CudaModuleNV;
class CudaFunctionNV;
# endif /*VK_ENABLE_BETA_EXTENSIONS*/
# if defined( VK_USE_PLATFORM_FUCHSIA )
//=== VK_FUCHSIA_buffer_collection ===
class BufferCollectionFUCHSIA;
# endif /*VK_USE_PLATFORM_FUCHSIA*/
//=== VK_EXT_opacity_micromap ===
class MicromapEXT;
//=== VK_NV_optical_flow ===
class OpticalFlowSessionNV;
//=== VK_EXT_shader_object ===
class ShaderEXT;
//=== VK_KHR_pipeline_binary ===
class PipelineBinaryKHR;
//=== VK_EXT_device_generated_commands ===
class IndirectCommandsLayoutEXT;
class IndirectExecutionSetEXT;
//====================
//=== RAII HANDLES ===
//====================
class Context
{
public:
# if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL
Context()
: m_dispatcher( new VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::ContextDispatcher(
m_dynamicLoader.getProcAddress<PFN_vkGetInstanceProcAddr>( "vkGetInstanceProcAddr" ) ) )
# else
Context( PFN_vkGetInstanceProcAddr getInstanceProcAddr )
: m_dispatcher( new VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::ContextDispatcher( getInstanceProcAddr ) )
# endif
{
}
~Context() = default;
Context( Context const & ) = delete;
Context( Context && rhs ) VULKAN_HPP_NOEXCEPT
# if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL
: m_dynamicLoader( std::move( rhs.m_dynamicLoader ) )
, m_dispatcher( rhs.m_dispatcher.release() )
# else
: m_dispatcher( rhs.m_dispatcher.release() )
# endif
{
}
Context & operator=( Context const & ) = delete;
Context & operator=( Context && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
# if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL
m_dynamicLoader = std::move( rhs.m_dynamicLoader );
# endif
m_dispatcher.reset( rhs.m_dispatcher.release() );
}
return *this;
}
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::ContextDispatcher const * getDispatcher() const
{
VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
return &*m_dispatcher;
}
void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Context & rhs )
{
# if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL
std::swap( m_dynamicLoader, rhs.m_dynamicLoader );
# endif
m_dispatcher.swap( rhs.m_dispatcher );
}
//=== VK_VERSION_1_0 ===
VULKAN_HPP_NODISCARD
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance>::Type
createInstance( VULKAN_HPP_NAMESPACE::InstanceCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT;
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties>
enumerateInstanceExtensionProperties( Optional<const std::string> layerName VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const;
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::LayerProperties> enumerateInstanceLayerProperties() const;
//=== VK_VERSION_1_1 ===
VULKAN_HPP_NODISCARD uint32_t enumerateInstanceVersion() const;
private:
# if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL
VULKAN_HPP_NAMESPACE::detail::DynamicLoader m_dynamicLoader;
# endif
std::unique_ptr<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::ContextDispatcher> m_dispatcher;
};
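// A minimal usage sketch for Context (illustrative only; it assumes the default vk / vk::raii
// namespace aliases, VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL, and exceptions enabled, i.e.
// VULKAN_HPP_RAII_NO_EXCEPTIONS not defined; "Example" and the variable names are made up):
//
//   vk::raii::Context context;  // loads vkGetInstanceProcAddr through the dynamic loader
//   vk::ApplicationInfo applicationInfo( "Example", 1, "ExampleEngine", 1, VK_API_VERSION_1_1 );
//   vk::InstanceCreateInfo instanceCreateInfo( {}, &applicationInfo );
//   vk::raii::Instance instance = context.createInstance( instanceCreateInfo );
//
// Without the dynamic loader tool, construct the Context from a PFN_vkGetInstanceProcAddr
// obtained elsewhere; without exceptions, createInstance() returns
// detail::CreateReturnType<Instance>::Type, which wraps the result instead of throwing.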
class Instance
{
public:
using CType = VkInstance;
using CppType = VULKAN_HPP_NAMESPACE::Instance;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eInstance;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eInstance;
public:
# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
Instance( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Context const & context,
VULKAN_HPP_NAMESPACE::InstanceCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
{
*this = context.createInstance( createInfo, allocator );
}
# endif
Instance( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Context const & context,
VkInstance instance,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_instance( instance ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
{
m_dispatcher.reset( new VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::InstanceDispatcher( context.getDispatcher()->vkGetInstanceProcAddr,
static_cast<VkInstance>( m_instance ) ) );
}
Instance( std::nullptr_t ) {}
~Instance()
{
clear();
}
Instance() = delete;
Instance( Instance const & ) = delete;
Instance( Instance && rhs ) VULKAN_HPP_NOEXCEPT
: m_instance( VULKAN_HPP_NAMESPACE::exchange( rhs.m_instance, {} ) )
, m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) )
, m_dispatcher( rhs.m_dispatcher.release() )
{
}
Instance & operator=( Instance const & ) = delete;
Instance & operator=( Instance && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
std::swap( m_instance, rhs.m_instance );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
return *this;
}
VULKAN_HPP_NAMESPACE::Instance const & operator*() const VULKAN_HPP_NOEXCEPT
{
return m_instance;
}
operator VULKAN_HPP_NAMESPACE::Instance() const VULKAN_HPP_NOEXCEPT
{
return m_instance;
}
void clear() VULKAN_HPP_NOEXCEPT
{
if ( m_instance )
{
getDispatcher()->vkDestroyInstance( static_cast<VkInstance>( m_instance ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
}
m_instance = nullptr;
m_allocator = nullptr;
m_dispatcher = nullptr;
}
VULKAN_HPP_NAMESPACE::Instance release()
{
m_allocator = nullptr;
m_dispatcher = nullptr;
return VULKAN_HPP_NAMESPACE::exchange( m_instance, nullptr );
}
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::InstanceDispatcher const * getDispatcher() const
{
VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
return &*m_dispatcher;
}
void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance & rhs ) VULKAN_HPP_NOEXCEPT
{
std::swap( m_instance, rhs.m_instance );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
//=== VK_VERSION_1_0 ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<
std::vector<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice>>::Type
enumeratePhysicalDevices() const;
VULKAN_HPP_NODISCARD PFN_vkVoidFunction getProcAddr( const std::string & name ) const VULKAN_HPP_NOEXCEPT;
//=== VK_VERSION_1_1 ===
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties> enumeratePhysicalDeviceGroups() const;
//=== VK_KHR_display ===
VULKAN_HPP_NODISCARD
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR>::Type
createDisplayPlaneSurfaceKHR( VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT;
# if defined( VK_USE_PLATFORM_XLIB_KHR )
//=== VK_KHR_xlib_surface ===
VULKAN_HPP_NODISCARD
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR>::Type
createXlibSurfaceKHR( VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT;
# endif /*VK_USE_PLATFORM_XLIB_KHR*/
# if defined( VK_USE_PLATFORM_XCB_KHR )
//=== VK_KHR_xcb_surface ===
VULKAN_HPP_NODISCARD
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR>::Type
createXcbSurfaceKHR( VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT;
# endif /*VK_USE_PLATFORM_XCB_KHR*/
# if defined( VK_USE_PLATFORM_WAYLAND_KHR )
//=== VK_KHR_wayland_surface ===
VULKAN_HPP_NODISCARD
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR>::Type
createWaylandSurfaceKHR( VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT;
# endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
# if defined( VK_USE_PLATFORM_ANDROID_KHR )
//=== VK_KHR_android_surface ===
VULKAN_HPP_NODISCARD
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR>::Type
createAndroidSurfaceKHR( VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT;
# endif /*VK_USE_PLATFORM_ANDROID_KHR*/
# if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_KHR_win32_surface ===
VULKAN_HPP_NODISCARD
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR>::Type
createWin32SurfaceKHR( VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT;
# endif /*VK_USE_PLATFORM_WIN32_KHR*/
//=== VK_EXT_debug_report ===
VULKAN_HPP_NODISCARD
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DebugReportCallbackEXT>::Type
createDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT;
void debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags,
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_,
uint64_t object,
size_t location,
int32_t messageCode,
const std::string & layerPrefix,
const std::string & message ) const VULKAN_HPP_NOEXCEPT;
# if defined( VK_USE_PLATFORM_GGP )
//=== VK_GGP_stream_descriptor_surface ===
VULKAN_HPP_NODISCARD
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR>::Type
createStreamDescriptorSurfaceGGP( VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT;
# endif /*VK_USE_PLATFORM_GGP*/
# if defined( VK_USE_PLATFORM_VI_NN )
//=== VK_NN_vi_surface ===
VULKAN_HPP_NODISCARD
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR>::Type
createViSurfaceNN( VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT;
# endif /*VK_USE_PLATFORM_VI_NN*/
//=== VK_KHR_device_group_creation ===
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties> enumeratePhysicalDeviceGroupsKHR() const;
# if defined( VK_USE_PLATFORM_IOS_MVK )
//=== VK_MVK_ios_surface ===
VULKAN_HPP_NODISCARD
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR>::Type
createIOSSurfaceMVK( VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT;
# endif /*VK_USE_PLATFORM_IOS_MVK*/
# if defined( VK_USE_PLATFORM_MACOS_MVK )
//=== VK_MVK_macos_surface ===
VULKAN_HPP_NODISCARD
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR>::Type
createMacOSSurfaceMVK( VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT;
# endif /*VK_USE_PLATFORM_MACOS_MVK*/
//=== VK_EXT_debug_utils ===
VULKAN_HPP_NODISCARD
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DebugUtilsMessengerEXT>::Type
createDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT;
void submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity,
VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes,
const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT & callbackData ) const VULKAN_HPP_NOEXCEPT;
# if defined( VK_USE_PLATFORM_FUCHSIA )
//=== VK_FUCHSIA_imagepipe_surface ===
VULKAN_HPP_NODISCARD
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR>::Type
createImagePipeSurfaceFUCHSIA( VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT;
# endif /*VK_USE_PLATFORM_FUCHSIA*/
# if defined( VK_USE_PLATFORM_METAL_EXT )
//=== VK_EXT_metal_surface ===
VULKAN_HPP_NODISCARD
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR>::Type
createMetalSurfaceEXT( VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT;
# endif /*VK_USE_PLATFORM_METAL_EXT*/
//=== VK_EXT_headless_surface ===
VULKAN_HPP_NODISCARD
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR>::Type
createHeadlessSurfaceEXT( VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT;
# if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
//=== VK_EXT_directfb_surface ===
VULKAN_HPP_NODISCARD
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR>::Type
createDirectFBSurfaceEXT( VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT;
# endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
# if defined( VK_USE_PLATFORM_SCREEN_QNX )
//=== VK_QNX_screen_surface ===
VULKAN_HPP_NODISCARD
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR>::Type
createScreenSurfaceQNX( VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT;
# endif /*VK_USE_PLATFORM_SCREEN_QNX*/
private:
VULKAN_HPP_NAMESPACE::Instance m_instance = {};
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
std::unique_ptr<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::InstanceDispatcher> m_dispatcher;
};
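// A minimal usage sketch for Instance (illustrative; same assumptions as the Context sketch above,
// with `instance` being a previously created vk::raii::Instance):
//
//   std::vector<vk::raii::PhysicalDevice> physicalDevices = instance.enumeratePhysicalDevices();
//   PFN_vkVoidFunction                    fp              = instance.getProcAddr( "vkEnumeratePhysicalDevices" );
//
// The surface and debug-messenger factory functions above follow the same pattern: each takes a
// CreateInfo (plus optional allocation callbacks) and returns the corresponding RAII handle,
// which destroys the underlying Vulkan object when it goes out of scope.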
class PhysicalDevice
{
public:
using CType = VkPhysicalDevice;
using CppType = VULKAN_HPP_NAMESPACE::PhysicalDevice;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePhysicalDevice;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePhysicalDevice;
public:
PhysicalDevice( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, VkPhysicalDevice physicalDevice )
: m_physicalDevice( physicalDevice ), m_dispatcher( instance.getDispatcher() )
{
}
PhysicalDevice( std::nullptr_t ) {}
~PhysicalDevice()
{
clear();
}
PhysicalDevice() = delete;
PhysicalDevice( PhysicalDevice const & rhs ) : m_physicalDevice( rhs.m_physicalDevice ), m_dispatcher( rhs.m_dispatcher ) {}
PhysicalDevice( PhysicalDevice && rhs ) VULKAN_HPP_NOEXCEPT
: m_physicalDevice( VULKAN_HPP_NAMESPACE::exchange( rhs.m_physicalDevice, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
{
}
PhysicalDevice & operator=( PhysicalDevice const & rhs )
{
m_physicalDevice = rhs.m_physicalDevice;
m_dispatcher = rhs.m_dispatcher;
return *this;
}
PhysicalDevice & operator=( PhysicalDevice && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
std::swap( m_physicalDevice, rhs.m_physicalDevice );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
return *this;
}
VULKAN_HPP_NAMESPACE::PhysicalDevice const & operator*() const VULKAN_HPP_NOEXCEPT
{
return m_physicalDevice;
}
operator VULKAN_HPP_NAMESPACE::PhysicalDevice() const VULKAN_HPP_NOEXCEPT
{
return m_physicalDevice;
}
void clear() VULKAN_HPP_NOEXCEPT
{
m_physicalDevice = nullptr;
m_dispatcher = nullptr;
}
VULKAN_HPP_NAMESPACE::PhysicalDevice release()
{
m_dispatcher = nullptr;
return VULKAN_HPP_NAMESPACE::exchange( m_physicalDevice, nullptr );
}
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::InstanceDispatcher const * getDispatcher() const
{
VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
return m_dispatcher;
}
void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice & rhs ) VULKAN_HPP_NOEXCEPT
{
std::swap( m_physicalDevice, rhs.m_physicalDevice );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
//=== VK_VERSION_1_0 ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures getFeatures() const VULKAN_HPP_NOEXCEPT;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::FormatProperties getFormatProperties( VULKAN_HPP_NAMESPACE::Format format ) const VULKAN_HPP_NOEXCEPT;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ImageFormatProperties
getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format,
VULKAN_HPP_NAMESPACE::ImageType type,
VULKAN_HPP_NAMESPACE::ImageTiling tiling,
VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,
VULKAN_HPP_NAMESPACE::ImageCreateFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties getProperties() const VULKAN_HPP_NOEXCEPT;
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties> getQueueFamilyProperties() const;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties getMemoryProperties() const VULKAN_HPP_NOEXCEPT;
VULKAN_HPP_NODISCARD
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device>::Type
createDevice( VULKAN_HPP_NAMESPACE::DeviceCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT;
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties>
enumerateDeviceExtensionProperties( Optional<const std::string> layerName VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const;
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::LayerProperties> enumerateDeviceLayerProperties() const;
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties>
getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format,
VULKAN_HPP_NAMESPACE::ImageType type,
VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,
VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,
VULKAN_HPP_NAMESPACE::ImageTiling tiling ) const;
//=== VK_VERSION_1_1 ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 getFeatures2() const VULKAN_HPP_NOEXCEPT;
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getFeatures2() const VULKAN_HPP_NOEXCEPT;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 getProperties2() const VULKAN_HPP_NOEXCEPT;
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getProperties2() const VULKAN_HPP_NOEXCEPT;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::FormatProperties2 getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format ) const VULKAN_HPP_NOEXCEPT;
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format ) const VULKAN_HPP_NOEXCEPT;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ImageFormatProperties2
getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo ) const;
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo ) const;
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> getQueueFamilyProperties2() const;
template <typename StructureChain>
VULKAN_HPP_NODISCARD std::vector<StructureChain> getQueueFamilyProperties2() const;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 getMemoryProperties2() const VULKAN_HPP_NOEXCEPT;
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getMemoryProperties2() const VULKAN_HPP_NOEXCEPT;
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2>
getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo ) const;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalBufferProperties
getExternalBufferProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo & externalBufferInfo ) const VULKAN_HPP_NOEXCEPT;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalFenceProperties
getExternalFenceProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo & externalFenceInfo ) const VULKAN_HPP_NOEXCEPT;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties
getExternalSemaphoreProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo ) const VULKAN_HPP_NOEXCEPT;
//=== VK_VERSION_1_3 ===
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties> getToolProperties() const;
//=== VK_KHR_surface ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Bool32 getSurfaceSupportKHR( uint32_t queueFamilyIndex, VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const;
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR>
getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const;
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR>
getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const;
//=== VK_KHR_swapchain ===
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::Rect2D> getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const;
//=== VK_KHR_display ===
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR> getDisplayPropertiesKHR() const;
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR> getDisplayPlanePropertiesKHR() const;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<
std::vector<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayKHR>>::Type
getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex ) const;
# if defined( VK_USE_PLATFORM_XLIB_KHR )
//=== VK_KHR_xlib_surface ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Bool32
getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display & dpy, VisualID visualID ) const VULKAN_HPP_NOEXCEPT;
# endif /*VK_USE_PLATFORM_XLIB_KHR*/
# if defined( VK_USE_PLATFORM_XCB_KHR )
//=== VK_KHR_xcb_surface ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Bool32
getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, xcb_connection_t & connection, xcb_visualid_t visual_id ) const VULKAN_HPP_NOEXCEPT;
# endif /*VK_USE_PLATFORM_XCB_KHR*/
# if defined( VK_USE_PLATFORM_WAYLAND_KHR )
//=== VK_KHR_wayland_surface ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Bool32 getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex,
struct wl_display & display ) const VULKAN_HPP_NOEXCEPT;
# endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
# if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_KHR_win32_surface ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Bool32 getWin32PresentationSupportKHR( uint32_t queueFamilyIndex ) const VULKAN_HPP_NOEXCEPT;
# endif /*VK_USE_PLATFORM_WIN32_KHR*/
//=== VK_KHR_video_queue ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR
getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR & videoProfile ) const;
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR & videoProfile ) const;
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR>
getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo ) const;
//=== VK_NV_external_memory_capabilities ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV getExternalImageFormatPropertiesNV(
VULKAN_HPP_NAMESPACE::Format format,
VULKAN_HPP_NAMESPACE::ImageType type,
VULKAN_HPP_NAMESPACE::ImageTiling tiling,
VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,
VULKAN_HPP_NAMESPACE::ImageCreateFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const;
//=== VK_KHR_get_physical_device_properties2 ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 getFeatures2KHR() const VULKAN_HPP_NOEXCEPT;
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getFeatures2KHR() const VULKAN_HPP_NOEXCEPT;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 getProperties2KHR() const VULKAN_HPP_NOEXCEPT;
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getProperties2KHR() const VULKAN_HPP_NOEXCEPT;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::FormatProperties2 getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format ) const VULKAN_HPP_NOEXCEPT;
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format ) const VULKAN_HPP_NOEXCEPT;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ImageFormatProperties2
getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo ) const;
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo ) const;
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> getQueueFamilyProperties2KHR() const;
template <typename StructureChain>
VULKAN_HPP_NODISCARD std::vector<StructureChain> getQueueFamilyProperties2KHR() const;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 getMemoryProperties2KHR() const VULKAN_HPP_NOEXCEPT;
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getMemoryProperties2KHR() const VULKAN_HPP_NOEXCEPT;
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2>
getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo ) const;
//=== VK_KHR_external_memory_capabilities ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalBufferProperties
getExternalBufferPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo & externalBufferInfo ) const VULKAN_HPP_NOEXCEPT;
//=== VK_KHR_external_semaphore_capabilities ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties
getExternalSemaphorePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo ) const VULKAN_HPP_NOEXCEPT;
# if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT )
//=== VK_EXT_acquire_xlib_display ===
void acquireXlibDisplayEXT( Display & dpy, VULKAN_HPP_NAMESPACE::DisplayKHR display ) const;
VULKAN_HPP_NODISCARD
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayKHR>::Type
getRandROutputDisplayEXT( Display & dpy, RROutput rrOutput ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT;
# endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
//=== VK_EXT_display_surface_counter ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const;
//=== VK_KHR_external_fence_capabilities ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalFenceProperties
getExternalFencePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo & externalFenceInfo ) const VULKAN_HPP_NOEXCEPT;
//=== VK_KHR_performance_query ===
VULKAN_HPP_NODISCARD
std::pair<std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR>, std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR>>
enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex ) const;
VULKAN_HPP_NODISCARD uint32_t getQueueFamilyPerformanceQueryPassesKHR(
const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR & performanceQueryCreateInfo ) const VULKAN_HPP_NOEXCEPT;
//=== VK_KHR_get_surface_capabilities2 ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR
getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const;
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const;
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>
getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const;
template <typename StructureChain>
VULKAN_HPP_NODISCARD std::vector<StructureChain> getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const;
//=== VK_KHR_get_display_properties2 ===
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR> getDisplayProperties2KHR() const;
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR> getDisplayPlaneProperties2KHR() const;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR
getDisplayPlaneCapabilities2KHR( const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR & displayPlaneInfo ) const;
//=== VK_EXT_sample_locations ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT
getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples ) const VULKAN_HPP_NOEXCEPT;
//=== VK_EXT_calibrated_timestamps ===
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::TimeDomainKHR> getCalibrateableTimeDomainsEXT() const;
//=== VK_KHR_fragment_shading_rate ===
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR> getFragmentShadingRatesKHR() const;
//=== VK_EXT_tooling_info ===
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties> getToolPropertiesEXT() const;
//=== VK_NV_cooperative_matrix ===
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV> getCooperativeMatrixPropertiesNV() const;
//=== VK_NV_coverage_reduction_mode ===
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV> getSupportedFramebufferMixedSamplesCombinationsNV() const;
# if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_EXT_full_screen_exclusive ===
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR>
getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const;
# endif /*VK_USE_PLATFORM_WIN32_KHR*/
//=== VK_EXT_acquire_drm_display ===
void acquireDrmDisplayEXT( int32_t drmFd, VULKAN_HPP_NAMESPACE::DisplayKHR display ) const;
VULKAN_HPP_NODISCARD
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayKHR>::Type
getDrmDisplayEXT( int32_t drmFd, uint32_t connectorId ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT;
//=== VK_KHR_video_encode_queue ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelPropertiesKHR
getVideoEncodeQualityLevelPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeQualityLevelInfoKHR & qualityLevelInfo ) const;
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
getVideoEncodeQualityLevelPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeQualityLevelInfoKHR & qualityLevelInfo ) const;
# if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_NV_acquire_winrt_display ===
VULKAN_HPP_NODISCARD
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayKHR>::Type
getWinrtDisplayNV( uint32_t deviceRelativeId ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT;
# endif /*VK_USE_PLATFORM_WIN32_KHR*/
# if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
//=== VK_EXT_directfb_surface ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Bool32 getDirectFBPresentationSupportEXT( uint32_t queueFamilyIndex,
IDirectFB & dfb ) const VULKAN_HPP_NOEXCEPT;
# endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
# if defined( VK_USE_PLATFORM_SCREEN_QNX )
//=== VK_QNX_screen_surface ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Bool32 getScreenPresentationSupportQNX( uint32_t queueFamilyIndex,
struct _screen_window & window ) const VULKAN_HPP_NOEXCEPT;
# endif /*VK_USE_PLATFORM_SCREEN_QNX*/
//=== VK_NV_optical_flow ===
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV>
getOpticalFlowImageFormatsNV( const VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV & opticalFlowImageFormatInfo ) const;
//=== VK_KHR_cooperative_matrix ===
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesKHR> getCooperativeMatrixPropertiesKHR() const;
//=== VK_KHR_calibrated_timestamps ===
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::TimeDomainKHR> getCalibrateableTimeDomainsKHR() const;
//=== VK_NV_cooperative_matrix2 ===
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixFlexibleDimensionsPropertiesNV>
getCooperativeMatrixFlexibleDimensionsPropertiesNV() const;
private:
VULKAN_HPP_NAMESPACE::PhysicalDevice m_physicalDevice = {};
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::InstanceDispatcher const * m_dispatcher = nullptr;
};
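// A minimal device-creation sketch (illustrative; assumes the default vk / vk::raii aliases,
// exceptions enabled, and a vk::raii::PhysicalDevice named `physicalDevice`; the queue family
// index 0 below is a placeholder, in real code pick a family from getQueueFamilyProperties()
// whose queueFlags match your needs):
//
//   std::vector<vk::QueueFamilyProperties> queueFamilies = physicalDevice.getQueueFamilyProperties();
//   uint32_t                  queueFamilyIndex = 0;
//   float                     queuePriority    = 1.0f;
//   vk::DeviceQueueCreateInfo queueCreateInfo( {}, queueFamilyIndex, 1, &queuePriority );
//   vk::DeviceCreateInfo      deviceCreateInfo( {}, 1, &queueCreateInfo );
//   vk::raii::Device          device( physicalDevice, deviceCreateInfo );
//   vk::raii::Queue           queue = device.getQueue( queueFamilyIndex, 0 );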
class PhysicalDevices : public std::vector<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice>
{
public:
# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
PhysicalDevices( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance )
{
*this = instance.enumeratePhysicalDevices();
}
# endif
PhysicalDevices( std::nullptr_t ) {}
PhysicalDevices() = delete;
PhysicalDevices( PhysicalDevices const & ) = delete;
PhysicalDevices( PhysicalDevices && rhs ) = default;
PhysicalDevices & operator=( PhysicalDevices const & ) = delete;
PhysicalDevices & operator=( PhysicalDevices && rhs ) = default;
private:
PhysicalDevices( std::vector<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice> && rhs )
{
std::swap( *this, rhs );
}
};
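// PhysicalDevices is simply a std::vector of RAII PhysicalDevice handles; with exceptions enabled
// it can be constructed directly from an Instance instead of calling enumeratePhysicalDevices().
// A short illustrative sketch (variable names are made up, and it assumes at least one device):
//
//   vk::raii::PhysicalDevices        physicalDevices( instance );
//   vk::raii::PhysicalDevice const & chosen = physicalDevices.front();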
class Device
{
public:
using CType = VkDevice;
using CppType = VULKAN_HPP_NAMESPACE::Device;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDevice;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDevice;
public:
# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
Device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice const & physicalDevice,
VULKAN_HPP_NAMESPACE::DeviceCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
{
*this = physicalDevice.createDevice( createInfo, allocator );
}
# endif
Device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice const & physicalDevice,
VkDevice device,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_device( device ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
{
m_dispatcher.reset( new VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher( physicalDevice.getDispatcher()->vkGetDeviceProcAddr,
static_cast<VkDevice>( m_device ) ) );
}
Device( std::nullptr_t ) {}
~Device()
{
clear();
}
Device() = delete;
Device( Device const & ) = delete;
Device( Device && rhs ) VULKAN_HPP_NOEXCEPT
: m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) )
, m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) )
, m_dispatcher( rhs.m_dispatcher.release() )
{
}
Device & operator=( Device const & ) = delete;
Device & operator=( Device && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
std::swap( m_device, rhs.m_device );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
return *this;
}
VULKAN_HPP_NAMESPACE::Device const & operator*() const VULKAN_HPP_NOEXCEPT
{
return m_device;
}
operator VULKAN_HPP_NAMESPACE::Device() const VULKAN_HPP_NOEXCEPT
{
return m_device;
}
void clear() VULKAN_HPP_NOEXCEPT
{
if ( m_device )
{
getDispatcher()->vkDestroyDevice( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
}
m_device = nullptr;
m_allocator = nullptr;
m_dispatcher = nullptr;
}
VULKAN_HPP_NAMESPACE::Device release()
{
m_allocator = nullptr;
m_dispatcher = nullptr;
return VULKAN_HPP_NAMESPACE::exchange( m_device, nullptr );
}
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const
{
VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
return &*m_dispatcher;
}
void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device & rhs ) VULKAN_HPP_NOEXCEPT
{
std::swap( m_device, rhs.m_device );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
//=== VK_VERSION_1_0 ===
VULKAN_HPP_NODISCARD PFN_vkVoidFunction getProcAddr( const std::string & name ) const VULKAN_HPP_NOEXCEPT;
VULKAN_HPP_NODISCARD
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Queue>::Type
getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT;
void waitIdle() const;
VULKAN_HPP_NODISCARD
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceMemory>::Type
allocateMemory( VULKAN_HPP_NAMESPACE::MemoryAllocateInfo const & allocateInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT;
void flushMappedMemoryRanges( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> const & memoryRanges ) const;
void invalidateMappedMemoryRanges( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> const & memoryRanges ) const;
VULKAN_HPP_NODISCARD
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Fence>::Type
createFence( VULKAN_HPP_NAMESPACE::FenceCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT;
void resetFences( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> const & fences ) const;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result waitForFences( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> const & fences,
VULKAN_HPP_NAMESPACE::Bool32 waitAll,
uint64_t timeout ) const;
VULKAN_HPP_NODISCARD
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Semaphore>::Type
createSemaphore( VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT;
VULKAN_HPP_NODISCARD
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Event>::Type
createEvent( VULKAN_HPP_NAMESPACE::EventCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT;
VULKAN_HPP_NODISCARD
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::QueryPool>::Type
createQueryPool( VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT;
VULKAN_HPP_NODISCARD
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Buffer>::Type
createBuffer( VULKAN_HPP_NAMESPACE::BufferCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT;
VULKAN_HPP_NODISCARD
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::BufferView>::Type
createBufferView( VULKAN_HPP_NAMESPACE::BufferViewCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT;
VULKAN_HPP_NODISCARD
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Image>::Type
createImage( VULKAN_HPP_NAMESPACE::ImageCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT;
VULKAN_HPP_NODISCARD
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ImageView>::Type
createImageView( VULKAN_HPP_NAMESPACE::ImageViewCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT;
VULKAN_HPP_NODISCARD
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ShaderModule>::Type
createShaderModule( VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT;
VULKAN_HPP_NODISCARD
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache>::Type
createPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT;
VULKAN_HPP_NODISCARD
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<std::vector<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Pipeline>>::Type
createGraphicsPipelines( VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
VULKAN_HPP_NODISCARD
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Pipeline>::Type
createGraphicsPipeline( VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT;
VULKAN_HPP_NODISCARD
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<std::vector<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Pipeline>>::Type
createComputePipelines( VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
VULKAN_HPP_NODISCARD
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Pipeline>::Type
createComputePipeline( VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT;
VULKAN_HPP_NODISCARD
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineLayout>::Type
createPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT;
VULKAN_HPP_NODISCARD
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Sampler>::Type
createSampler( VULKAN_HPP_NAMESPACE::SamplerCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT;
VULKAN_HPP_NODISCARD
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DescriptorSetLayout>::Type
createDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT;
VULKAN_HPP_NODISCARD
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DescriptorPool>::Type
createDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<
std::vector<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DescriptorSet>>::Type
allocateDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo const & allocateInfo ) const;
void updateDescriptorSets( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const & descriptorWrites,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CopyDescriptorSet> const & descriptorCopies ) const
VULKAN_HPP_NOEXCEPT;
VULKAN_HPP_NODISCARD
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Framebuffer>::Type
createFramebuffer( VULKAN_HPP_NAMESPACE::FramebufferCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT;
VULKAN_HPP_NODISCARD
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::RenderPass>::Type
createRenderPass( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT;
VULKAN_HPP_NODISCARD
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CommandPool>::Type
createCommandPool( VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<
std::vector<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CommandBuffer>>::Type
allocateCommandBuffers( VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo const & allocateInfo ) const;
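      // Usage sketch: the create* / allocate* members above wrap the corresponding vkCreate* / vkAllocate*
      // entry points and return RAII owners that destroy or free themselves on scope exit. A minimal sketch
      // with exceptions enabled, assuming a valid VULKAN_HPP_RAII_NAMESPACE::Device named `device` and a
      // suitable `queueFamilyIndex` (both are assumptions, not part of this header):
      //
      //   VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo poolInfo( {}, queueFamilyIndex );
      //   VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CommandPool commandPool = device.createCommandPool( poolInfo );
      //   VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo allocateInfo( commandPool, VULKAN_HPP_NAMESPACE::CommandBufferLevel::ePrimary, 1 );
      //   std::vector<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CommandBuffer> commandBuffers = device.allocateCommandBuffers( allocateInfo );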
//=== VK_VERSION_1_1 ===
void bindBufferMemory2( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> const & bindInfos ) const;
void bindImageMemory2( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> const & bindInfos ) const;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags
getGroupPeerMemoryFeatures( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex ) const VULKAN_HPP_NOEXCEPT;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2
getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT;
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2
getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT;
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT;
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2>
getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info ) const;
VULKAN_HPP_NODISCARD
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Queue>::Type
getQueue2( VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 const & queueInfo ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT;
VULKAN_HPP_NODISCARD
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SamplerYcbcrConversion>::Type
createSamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DescriptorUpdateTemplate>::Type
createDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport
getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT;
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT;
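      // Note on the paired overloads above (a pattern used throughout this class): the non-template form
      // returns only the base structure, while the template form returns a StructureChain so extension
      // structures chained via pNext are filled in the same call. A minimal sketch, assuming `device` is a
      // valid VULKAN_HPP_RAII_NAMESPACE::Device and `buffer` a valid VULKAN_HPP_NAMESPACE::Buffer:
      //
      //   VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 info( buffer );
      //   auto chain = device.getBufferMemoryRequirements2<VULKAN_HPP_NAMESPACE::MemoryRequirements2,
      //                                                    VULKAN_HPP_NAMESPACE::MemoryDedicatedRequirements>( info );
      //   VULKAN_HPP_NAMESPACE::Bool32 preferDedicated = chain.get<VULKAN_HPP_NAMESPACE::MemoryDedicatedRequirements>().prefersDedicatedAllocation;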
//=== VK_VERSION_1_2 ===
VULKAN_HPP_NODISCARD
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::RenderPass>::Type
createRenderPass2( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & waitInfo, uint64_t timeout ) const;
void signalSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo & signalInfo ) const;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceAddress
getBufferAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info ) const VULKAN_HPP_NOEXCEPT;
VULKAN_HPP_NODISCARD uint64_t getBufferOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info ) const VULKAN_HPP_NOEXCEPT;
VULKAN_HPP_NODISCARD uint64_t
getMemoryOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo & info ) const VULKAN_HPP_NOEXCEPT;
//=== VK_VERSION_1_3 ===
VULKAN_HPP_NODISCARD
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PrivateDataSlot>::Type
createPrivateDataSlot( VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT;
void setPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType_,
uint64_t objectHandle,
VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
uint64_t data ) const;
VULKAN_HPP_NODISCARD uint64_t getPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType_,
uint64_t objectHandle,
VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot ) const VULKAN_HPP_NOEXCEPT;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2
getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT;
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2
getImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT;
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
getImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT;
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2>
getImageSparseMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const;
//=== VK_KHR_swapchain ===
VULKAN_HPP_NODISCARD
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SwapchainKHR>::Type
createSwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR getGroupPresentCapabilitiesKHR() const;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR
getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const;
VULKAN_HPP_NODISCARD std::pair<VULKAN_HPP_NAMESPACE::Result, uint32_t>
acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR & acquireInfo ) const;
//=== VK_KHR_display_swapchain ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<
std::vector<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SwapchainKHR>>::Type
createSharedSwapchainsKHR( VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
VULKAN_HPP_NODISCARD
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SwapchainKHR>::Type
createSharedSwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT;
//=== VK_EXT_debug_marker ===
void debugMarkerSetObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT & tagInfo ) const;
void debugMarkerSetObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT & nameInfo ) const;
//=== VK_KHR_video_queue ===
VULKAN_HPP_NODISCARD
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::VideoSessionKHR>::Type
createVideoSessionKHR( VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::VideoSessionParametersKHR>::Type
createVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT;
//=== VK_NVX_binary_import ===
VULKAN_HPP_NODISCARD
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CuModuleNVX>::Type
createCuModuleNVX( VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT;
VULKAN_HPP_NODISCARD
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CuFunctionNVX>::Type
createCuFunctionNVX( VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT;
//=== VK_NVX_image_view_handle ===
VULKAN_HPP_NODISCARD uint32_t getImageViewHandleNVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX & info ) const VULKAN_HPP_NOEXCEPT;
//=== VK_KHR_device_group ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags
getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex ) const VULKAN_HPP_NOEXCEPT;
# if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_KHR_external_memory_win32 ===
VULKAN_HPP_NODISCARD HANDLE getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR & getWin32HandleInfo ) const;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR
getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle ) const;
# endif /*VK_USE_PLATFORM_WIN32_KHR*/
//=== VK_KHR_external_memory_fd ===
VULKAN_HPP_NODISCARD int getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR & getFdInfo ) const;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR
getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, int fd ) const;
# if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_KHR_external_semaphore_win32 ===
void importSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo ) const;
VULKAN_HPP_NODISCARD HANDLE getSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo ) const;
# endif /*VK_USE_PLATFORM_WIN32_KHR*/
//=== VK_KHR_external_semaphore_fd ===
void importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo ) const;
VULKAN_HPP_NODISCARD int getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR & getFdInfo ) const;
//=== VK_KHR_descriptor_update_template ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DescriptorUpdateTemplate>::Type
createDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT;
void destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
//=== VK_EXT_display_control ===
void displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT & displayPowerInfo ) const;
VULKAN_HPP_NODISCARD
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Fence>::Type
registerEventEXT( VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT const & deviceEventInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT;
VULKAN_HPP_NODISCARD
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Fence>::Type
registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayKHR const & display,
VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT const & displayEventInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT;
//=== VK_EXT_hdr_metadata ===
void setHdrMetadataEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainKHR> const & swapchains,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::HdrMetadataEXT> const & metadata ) const;
//=== VK_KHR_create_renderpass2 ===
VULKAN_HPP_NODISCARD
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::RenderPass>::Type
createRenderPass2KHR( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT;
# if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_KHR_external_fence_win32 ===
void importFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo ) const;
VULKAN_HPP_NODISCARD HANDLE getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR & getWin32HandleInfo ) const;
# endif /*VK_USE_PLATFORM_WIN32_KHR*/
//=== VK_KHR_external_fence_fd ===
void importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR & importFenceFdInfo ) const;
VULKAN_HPP_NODISCARD int getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR & getFdInfo ) const;
//=== VK_KHR_performance_query ===
void acquireProfilingLockKHR( const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR & info ) const;
void releaseProfilingLockKHR() const VULKAN_HPP_NOEXCEPT;
//=== VK_EXT_debug_utils ===
void setDebugUtilsObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT & nameInfo ) const;
void setDebugUtilsObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT & tagInfo ) const;
# if defined( VK_USE_PLATFORM_ANDROID_KHR )
//=== VK_ANDROID_external_memory_android_hardware_buffer ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID
getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer ) const;
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer ) const;
VULKAN_HPP_NODISCARD struct AHardwareBuffer *
getMemoryAndroidHardwareBufferANDROID( const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID & info ) const;
# endif /*VK_USE_PLATFORM_ANDROID_KHR*/
# if defined( VK_ENABLE_BETA_EXTENSIONS )
//=== VK_AMDX_shader_enqueue ===
VULKAN_HPP_NODISCARD
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<std::vector<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Pipeline>>::Type
createExecutionGraphPipelinesAMDX(
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX> const & createInfos,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
VULKAN_HPP_NODISCARD
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Pipeline>::Type
createExecutionGraphPipelineAMDX(
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT;
# endif /*VK_ENABLE_BETA_EXTENSIONS*/
//=== VK_KHR_get_memory_requirements2 ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2
getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT;
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2
getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT;
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT;
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2>
getImageSparseMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info ) const;
//=== VK_KHR_acceleration_structure ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::AccelerationStructureKHR>::Type
createAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result buildAccelerationStructuresKHR(
VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const> const & pBuildRangeInfos ) const;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result
copyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR & info ) const;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result
copyAccelerationStructureToMemoryKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR & info ) const;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result
copyMemoryToAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR & info ) const;
template <typename DataType>
VULKAN_HPP_NODISCARD std::vector<DataType> writeAccelerationStructuresPropertiesKHR(
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures,
VULKAN_HPP_NAMESPACE::QueryType queryType,
size_t dataSize,
size_t stride ) const;
template <typename DataType>
VULKAN_HPP_NODISCARD DataType writeAccelerationStructuresPropertyKHR(
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures,
VULKAN_HPP_NAMESPACE::QueryType queryType,
size_t stride ) const;
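      // Note: the two templates above wrap the host command vkWriteAccelerationStructuresPropertiesKHR,
      // which requires the accelerationStructureHostCommands feature. A hedged sketch querying the compacted
      // size of already built acceleration structures (`device` and `accelerationStructures` are assumptions):
      //
      //   VULKAN_HPP_NAMESPACE::DeviceSize compactedSize = device.writeAccelerationStructuresPropertyKHR<VULKAN_HPP_NAMESPACE::DeviceSize>(
      //     accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType::eAccelerationStructureCompactedSizeKHR, sizeof( VULKAN_HPP_NAMESPACE::DeviceSize ) );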
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceAddress
getAccelerationStructureAddressKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR & info ) const VULKAN_HPP_NOEXCEPT;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR
getAccelerationStructureCompatibilityKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR & versionInfo ) const VULKAN_HPP_NOEXCEPT;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR getAccelerationStructureBuildSizesKHR(
VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType,
const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR & buildInfo,
VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t> const & maxPrimitiveCounts VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const;
//=== VK_KHR_ray_tracing_pipeline ===
VULKAN_HPP_NODISCARD
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<std::vector<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Pipeline>>::Type
createRayTracingPipelinesKHR(
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeferredOperationKHR> const & deferredOperation,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
VULKAN_HPP_NODISCARD
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Pipeline>::Type
createRayTracingPipelineKHR(
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeferredOperationKHR> const & deferredOperation,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT;
//=== VK_KHR_sampler_ycbcr_conversion ===
VULKAN_HPP_NODISCARD
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SamplerYcbcrConversion>::Type
createSamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT;
void destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
//=== VK_KHR_bind_memory2 ===
void bindBufferMemory2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> const & bindInfos ) const;
void bindImageMemory2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> const & bindInfos ) const;
//=== VK_EXT_validation_cache ===
VULKAN_HPP_NODISCARD
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ValidationCacheEXT>::Type
createValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT;
//=== VK_NV_ray_tracing ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::AccelerationStructureNV>::Type
createAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR getAccelerationStructureMemoryRequirementsNV(
const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV & info ) const VULKAN_HPP_NOEXCEPT;
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getAccelerationStructureMemoryRequirementsNV(
const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV & info ) const VULKAN_HPP_NOEXCEPT;
void bindAccelerationStructureMemoryNV(
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV> const & bindInfos ) const;
VULKAN_HPP_NODISCARD
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<std::vector<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Pipeline>>::Type
createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
VULKAN_HPP_NODISCARD
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Pipeline>::Type
createRayTracingPipelineNV( VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT;
//=== VK_KHR_maintenance3 ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport
getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT;
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT;
//=== VK_EXT_external_memory_host ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT
getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, const void * pHostPointer ) const;
//=== VK_EXT_calibrated_timestamps ===
VULKAN_HPP_NODISCARD std::pair<std::vector<uint64_t>, uint64_t>
getCalibratedTimestampsEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR> const & timestampInfos ) const;
VULKAN_HPP_NODISCARD std::pair<uint64_t, uint64_t>
getCalibratedTimestampEXT( const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR & timestampInfo ) const;
//=== VK_KHR_timeline_semaphore ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result waitSemaphoresKHR( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & waitInfo, uint64_t timeout ) const;
void signalSemaphoreKHR( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo & signalInfo ) const;
//=== VK_INTEL_performance_query ===
void initializePerformanceApiINTEL( const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL & initializeInfo ) const;
void uninitializePerformanceApiINTEL() const VULKAN_HPP_NOEXCEPT;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PerformanceConfigurationINTEL>::Type
acquirePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL const & acquireInfo ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PerformanceValueINTEL
getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter ) const;
//=== VK_EXT_buffer_device_address ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceAddress
getBufferAddressEXT( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info ) const VULKAN_HPP_NOEXCEPT;
# if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_EXT_full_screen_exclusive ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR
getGroupSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const;
# endif /*VK_USE_PLATFORM_WIN32_KHR*/
//=== VK_KHR_buffer_device_address ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceAddress
getBufferAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info ) const VULKAN_HPP_NOEXCEPT;
VULKAN_HPP_NODISCARD uint64_t getBufferOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info ) const VULKAN_HPP_NOEXCEPT;
VULKAN_HPP_NODISCARD uint64_t
getMemoryOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo & info ) const VULKAN_HPP_NOEXCEPT;
//=== VK_KHR_deferred_host_operations ===
VULKAN_HPP_NODISCARD
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeferredOperationKHR>::Type
createDeferredOperationKHR( VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT;
//=== VK_KHR_pipeline_executable_properties ===
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR>
getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR & pipelineInfo ) const;
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR>
getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo ) const;
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR>
getPipelineExecutableInternalRepresentationsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo ) const;
//=== VK_EXT_host_image_copy ===
void copyMemoryToImageEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfoEXT & copyMemoryToImageInfo ) const;
void copyImageToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfoEXT & copyImageToMemoryInfo ) const;
void copyImageToImageEXT( const VULKAN_HPP_NAMESPACE::CopyImageToImageInfoEXT & copyImageToImageInfo ) const;
void transitionImageLayoutEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::HostImageLayoutTransitionInfoEXT> const & transitions ) const;
//=== VK_KHR_map_memory2 ===
VULKAN_HPP_NODISCARD void * mapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryMapInfoKHR & memoryMapInfo ) const;
void unmapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryUnmapInfoKHR & memoryUnmapInfo ) const;
//=== VK_EXT_swapchain_maintenance1 ===
void releaseSwapchainImagesEXT( const VULKAN_HPP_NAMESPACE::ReleaseSwapchainImagesInfoEXT & releaseInfo ) const;
//=== VK_NV_device_generated_commands ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2
getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV & info ) const VULKAN_HPP_NOEXCEPT;
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV & info ) const VULKAN_HPP_NOEXCEPT;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::IndirectCommandsLayoutNV>::Type
createIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT;
//=== VK_EXT_private_data ===
VULKAN_HPP_NODISCARD
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PrivateDataSlot>::Type
createPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT;
void destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
void setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_,
uint64_t objectHandle,
VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
uint64_t data ) const;
VULKAN_HPP_NODISCARD uint64_t getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_,
uint64_t objectHandle,
VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot ) const VULKAN_HPP_NOEXCEPT;
//=== VK_KHR_video_encode_queue ===
VULKAN_HPP_NODISCARD std::pair<VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR, std::vector<uint8_t>>
getEncodedVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR & videoSessionParametersInfo ) const;
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD std::pair<VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>, std::vector<uint8_t>>
getEncodedVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR & videoSessionParametersInfo ) const;
# if defined( VK_ENABLE_BETA_EXTENSIONS )
//=== VK_NV_cuda_kernel_launch ===
VULKAN_HPP_NODISCARD
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CudaModuleNV>::Type
createCudaModuleNV( VULKAN_HPP_NAMESPACE::CudaModuleCreateInfoNV const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT;
VULKAN_HPP_NODISCARD
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CudaFunctionNV>::Type
createCudaFunctionNV( VULKAN_HPP_NAMESPACE::CudaFunctionCreateInfoNV const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT;
# endif /*VK_ENABLE_BETA_EXTENSIONS*/
# if defined( VK_USE_PLATFORM_METAL_EXT )
//=== VK_EXT_metal_objects ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT exportMetalObjectsEXT() const VULKAN_HPP_NOEXCEPT;
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> exportMetalObjectsEXT() const VULKAN_HPP_NOEXCEPT;
# endif /*VK_USE_PLATFORM_METAL_EXT*/
//=== VK_EXT_descriptor_buffer ===
void getDescriptorEXT( const VULKAN_HPP_NAMESPACE::DescriptorGetInfoEXT & descriptorInfo, size_t dataSize, void * pDescriptor ) const VULKAN_HPP_NOEXCEPT;
template <typename DescriptorType>
VULKAN_HPP_NODISCARD DescriptorType getDescriptorEXT( const VULKAN_HPP_NAMESPACE::DescriptorGetInfoEXT & descriptorInfo ) const VULKAN_HPP_NOEXCEPT;
template <typename DataType>
VULKAN_HPP_NODISCARD DataType getBufferOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::BufferCaptureDescriptorDataInfoEXT & info ) const;
template <typename DataType>
VULKAN_HPP_NODISCARD DataType getImageOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::ImageCaptureDescriptorDataInfoEXT & info ) const;
template <typename DataType>
VULKAN_HPP_NODISCARD DataType
getImageViewOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::ImageViewCaptureDescriptorDataInfoEXT & info ) const;
template <typename DataType>
VULKAN_HPP_NODISCARD DataType getSamplerOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::SamplerCaptureDescriptorDataInfoEXT & info ) const;
template <typename DataType>
VULKAN_HPP_NODISCARD DataType
getAccelerationStructureOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::AccelerationStructureCaptureDescriptorDataInfoEXT & info ) const;
//=== VK_EXT_device_fault ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD Result getFaultInfoEXT( VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT * pFaultCounts,
VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT * pFaultInfo,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# if defined( VK_USE_PLATFORM_FUCHSIA )
//=== VK_FUCHSIA_external_memory ===
VULKAN_HPP_NODISCARD zx_handle_t getMemoryZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA & getZirconHandleInfo ) const;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA
getMemoryZirconHandlePropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, zx_handle_t zirconHandle ) const;
# endif /*VK_USE_PLATFORM_FUCHSIA*/
# if defined( VK_USE_PLATFORM_FUCHSIA )
//=== VK_FUCHSIA_external_semaphore ===
void importSemaphoreZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA & importSemaphoreZirconHandleInfo ) const;
VULKAN_HPP_NODISCARD zx_handle_t
getSemaphoreZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA & getZirconHandleInfo ) const;
# endif /*VK_USE_PLATFORM_FUCHSIA*/
# if defined( VK_USE_PLATFORM_FUCHSIA )
//=== VK_FUCHSIA_buffer_collection ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::BufferCollectionFUCHSIA>::Type
createBufferCollectionFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT;
# endif /*VK_USE_PLATFORM_FUCHSIA*/
//=== VK_NV_external_memory_rdma ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::RemoteAddressNV
getMemoryRemoteAddressNV( const VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV & memoryGetRemoteAddressInfo ) const;
//=== VK_EXT_pipeline_properties ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::BaseOutStructure getPipelinePropertiesEXT( const VULKAN_HPP_NAMESPACE::PipelineInfoEXT & pipelineInfo ) const;
//=== VK_EXT_opacity_micromap ===
VULKAN_HPP_NODISCARD
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::MicromapEXT>::Type
createMicromapEXT( VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result
buildMicromapsEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT> const & infos ) const;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result copyMicromapEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT & info ) const;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result copyMicromapToMemoryEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT & info ) const;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result copyMemoryToMicromapEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT & info ) const;
template <typename DataType>
VULKAN_HPP_NODISCARD std::vector<DataType>
writeMicromapsPropertiesEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapEXT> const & micromaps,
VULKAN_HPP_NAMESPACE::QueryType queryType,
size_t dataSize,
size_t stride ) const;
template <typename DataType>
VULKAN_HPP_NODISCARD DataType writeMicromapsPropertyEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapEXT> const & micromaps,
VULKAN_HPP_NAMESPACE::QueryType queryType,
size_t stride ) const;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR
getMicromapCompatibilityEXT( const VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT & versionInfo ) const VULKAN_HPP_NOEXCEPT;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT
getMicromapBuildSizesEXT( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType,
const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT & buildInfo ) const VULKAN_HPP_NOEXCEPT;
//=== VK_KHR_maintenance4 ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2
getBufferMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT;
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
getBufferMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2
getImageMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT;
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
getImageMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT;
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2>
getImageSparseMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const;
//=== VK_VALVE_descriptor_set_host_mapping ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE getDescriptorSetLayoutHostMappingInfoVALVE(
const VULKAN_HPP_NAMESPACE::DescriptorSetBindingReferenceVALVE & bindingReference ) const VULKAN_HPP_NOEXCEPT;
//=== VK_NV_device_generated_commands_compute ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2
getPipelineIndirectMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT;
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
getPipelineIndirectMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceAddress
getPipelineIndirectAddressNV( const VULKAN_HPP_NAMESPACE::PipelineIndirectDeviceAddressInfoNV & info ) const VULKAN_HPP_NOEXCEPT;
//=== VK_EXT_shader_module_identifier ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT
getShaderModuleCreateInfoIdentifierEXT( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT;
//=== VK_NV_optical_flow ===
VULKAN_HPP_NODISCARD
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::OpticalFlowSessionNV>::Type
createOpticalFlowSessionNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT;
//=== VK_KHR_maintenance5 ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Extent2D
getRenderingAreaGranularityKHR( const VULKAN_HPP_NAMESPACE::RenderingAreaInfoKHR & renderingAreaInfo ) const VULKAN_HPP_NOEXCEPT;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR
getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR & info ) const VULKAN_HPP_NOEXCEPT;
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR & info ) const VULKAN_HPP_NOEXCEPT;
//=== VK_AMD_anti_lag ===
void antiLagUpdateAMD( const VULKAN_HPP_NAMESPACE::AntiLagDataAMD & data ) const VULKAN_HPP_NOEXCEPT;
//=== VK_EXT_shader_object ===
VULKAN_HPP_NODISCARD
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<std::vector<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ShaderEXT>>::Type
createShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT> const & createInfos,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
VULKAN_HPP_NODISCARD
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ShaderEXT>::Type
createShaderEXT( VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT;
//=== VK_KHR_pipeline_binary ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<
std::vector<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineBinaryKHR>>::Type
createPipelineBinariesKHR( VULKAN_HPP_NAMESPACE::PipelineBinaryCreateInfoKHR const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PipelineBinaryKeyKHR getPipelineKeyKHR(
Optional<const VULKAN_HPP_NAMESPACE::PipelineCreateInfoKHR> pipelineCreateInfo VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const;
VULKAN_HPP_NODISCARD std::pair<VULKAN_HPP_NAMESPACE::PipelineBinaryKeyKHR, std::vector<uint8_t>>
getPipelineBinaryDataKHR( const VULKAN_HPP_NAMESPACE::PipelineBinaryDataInfoKHR & info ) const;
void releaseCapturedPipelineDataKHR( const VULKAN_HPP_NAMESPACE::ReleaseCapturedPipelineDataInfoKHR & info,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
//=== VK_QCOM_tile_properties ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::TilePropertiesQCOM
getDynamicRenderingTilePropertiesQCOM( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo ) const VULKAN_HPP_NOEXCEPT;
# if defined( VK_USE_PLATFORM_SCREEN_QNX )
//=== VK_QNX_external_memory_screen_buffer ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX getScreenBufferPropertiesQNX( const struct _screen_buffer & buffer ) const;
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getScreenBufferPropertiesQNX( const struct _screen_buffer & buffer ) const;
# endif /*VK_USE_PLATFORM_SCREEN_QNX*/
//=== VK_KHR_calibrated_timestamps ===
VULKAN_HPP_NODISCARD std::pair<std::vector<uint64_t>, uint64_t>
getCalibratedTimestampsKHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR> const & timestampInfos ) const;
VULKAN_HPP_NODISCARD std::pair<uint64_t, uint64_t>
getCalibratedTimestampKHR( const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR & timestampInfo ) const;
//=== VK_EXT_device_generated_commands ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2
getGeneratedCommandsMemoryRequirementsEXT( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoEXT & info ) const VULKAN_HPP_NOEXCEPT;
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
getGeneratedCommandsMemoryRequirementsEXT( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoEXT & info ) const VULKAN_HPP_NOEXCEPT;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::IndirectCommandsLayoutEXT>::Type
createIndirectCommandsLayoutEXT( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoEXT const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::IndirectExecutionSetEXT>::Type
createIndirectExecutionSetEXT( VULKAN_HPP_NAMESPACE::IndirectExecutionSetCreateInfoEXT const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT;
private:
VULKAN_HPP_NAMESPACE::Device m_device = {};
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
std::unique_ptr<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher> m_dispatcher;
};
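    // Usage note: every create* / allocate* member of Device returns detail::CreateReturnType<T>::Type.
    // With exceptions enabled that is T itself and failures throw VULKAN_HPP_NAMESPACE::SystemError; with
    // VULKAN_HPP_RAII_NO_EXCEPTIONS defined it is VULKAN_HPP_EXPECTED<T, VULKAN_HPP_NAMESPACE::Result>.
    // A minimal sketch of the no-exceptions path, assuming a valid Device `device` and a filled
    // SamplerCreateInfo `createInfo` (both are assumptions):
    //
    //   auto samplerOrError = device.createSampler( createInfo );
    //   if ( !samplerOrError )
    //   {
    //     return samplerOrError.error();   // a VULKAN_HPP_NAMESPACE::Result describing the failure
    //   }
    //   VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Sampler sampler = std::move( *samplerOrError );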
class AccelerationStructureKHR
{
public:
using CType = VkAccelerationStructureKHR;
using CppType = VULKAN_HPP_NAMESPACE::AccelerationStructureKHR;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eAccelerationStructureKHR;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eAccelerationStructureKHR;
public:
# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
AccelerationStructureKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
{
*this = device.createAccelerationStructureKHR( createInfo, allocator );
}
# endif
AccelerationStructureKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VkAccelerationStructureKHR accelerationStructure,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_device( device )
, m_accelerationStructure( accelerationStructure )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
{
}
AccelerationStructureKHR( std::nullptr_t ) {}
~AccelerationStructureKHR()
{
clear();
}
AccelerationStructureKHR() = delete;
AccelerationStructureKHR( AccelerationStructureKHR const & ) = delete;
AccelerationStructureKHR( AccelerationStructureKHR && rhs ) VULKAN_HPP_NOEXCEPT
: m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) )
, m_accelerationStructure( VULKAN_HPP_NAMESPACE::exchange( rhs.m_accelerationStructure, {} ) )
, m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
{
}
AccelerationStructureKHR & operator=( AccelerationStructureKHR const & ) = delete;
AccelerationStructureKHR & operator=( AccelerationStructureKHR && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
std::swap( m_device, rhs.m_device );
std::swap( m_accelerationStructure, rhs.m_accelerationStructure );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
return *this;
}
VULKAN_HPP_NAMESPACE::AccelerationStructureKHR const & operator*() const VULKAN_HPP_NOEXCEPT
{
return m_accelerationStructure;
}
operator VULKAN_HPP_NAMESPACE::AccelerationStructureKHR() const VULKAN_HPP_NOEXCEPT
{
return m_accelerationStructure;
}
void clear() VULKAN_HPP_NOEXCEPT
{
if ( m_accelerationStructure )
{
getDispatcher()->vkDestroyAccelerationStructureKHR( static_cast<VkDevice>( m_device ),
static_cast<VkAccelerationStructureKHR>( m_accelerationStructure ),
reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
}
m_device = nullptr;
m_accelerationStructure = nullptr;
m_allocator = nullptr;
m_dispatcher = nullptr;
}
VULKAN_HPP_NAMESPACE::AccelerationStructureKHR release()
{
m_device = nullptr;
m_allocator = nullptr;
m_dispatcher = nullptr;
return VULKAN_HPP_NAMESPACE::exchange( m_accelerationStructure, nullptr );
}
VULKAN_HPP_NAMESPACE::Device getDevice() const
{
return m_device;
}
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const
{
VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
return m_dispatcher;
}
void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::AccelerationStructureKHR & rhs ) VULKAN_HPP_NOEXCEPT
{
std::swap( m_device, rhs.m_device );
std::swap( m_accelerationStructure, rhs.m_accelerationStructure );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
private:
VULKAN_HPP_NAMESPACE::Device m_device = {};
VULKAN_HPP_NAMESPACE::AccelerationStructureKHR m_accelerationStructure = {};
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr;
};
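    // Ownership note: RAII handle classes such as AccelerationStructureKHR above are move-only. The
    // destructor and clear() invoke the matching vkDestroy* entry point through the stored dispatcher,
    // while release() hands the raw handle back to the caller without destroying it. A minimal sketch,
    // assuming a valid Device `device` and a filled AccelerationStructureCreateInfoKHR `createInfo`
    // (exceptions enabled; both names are assumptions):
    //
    //   VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::AccelerationStructureKHR accel( device, createInfo );  // created and owned
    //   auto moved = std::move( accel );                                         // ownership transfers, accel is left empty
    //   VULKAN_HPP_NAMESPACE::AccelerationStructureKHR raw = moved.release();    // caller now owns raw and must destroy it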
class AccelerationStructureNV
{
public:
using CType = VkAccelerationStructureNV;
using CppType = VULKAN_HPP_NAMESPACE::AccelerationStructureNV;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eAccelerationStructureNV;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eAccelerationStructureNV;
public:
# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
AccelerationStructureNV( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
{
*this = device.createAccelerationStructureNV( createInfo, allocator );
}
# endif
AccelerationStructureNV( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VkAccelerationStructureNV accelerationStructure,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_device( device )
, m_accelerationStructure( accelerationStructure )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
{
}
AccelerationStructureNV( std::nullptr_t ) {}
~AccelerationStructureNV()
{
clear();
}
AccelerationStructureNV() = delete;
AccelerationStructureNV( AccelerationStructureNV const & ) = delete;
AccelerationStructureNV( AccelerationStructureNV && rhs ) VULKAN_HPP_NOEXCEPT
: m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) )
, m_accelerationStructure( VULKAN_HPP_NAMESPACE::exchange( rhs.m_accelerationStructure, {} ) )
, m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
{
}
AccelerationStructureNV & operator=( AccelerationStructureNV const & ) = delete;
AccelerationStructureNV & operator=( AccelerationStructureNV && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
std::swap( m_device, rhs.m_device );
std::swap( m_accelerationStructure, rhs.m_accelerationStructure );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
return *this;
}
VULKAN_HPP_NAMESPACE::AccelerationStructureNV const & operator*() const VULKAN_HPP_NOEXCEPT
{
return m_accelerationStructure;
}
operator VULKAN_HPP_NAMESPACE::AccelerationStructureNV() const VULKAN_HPP_NOEXCEPT
{
return m_accelerationStructure;
}
void clear() VULKAN_HPP_NOEXCEPT
{
if ( m_accelerationStructure )
{
getDispatcher()->vkDestroyAccelerationStructureNV( static_cast<VkDevice>( m_device ),
static_cast<VkAccelerationStructureNV>( m_accelerationStructure ),
reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
}
m_device = nullptr;
m_accelerationStructure = nullptr;
m_allocator = nullptr;
m_dispatcher = nullptr;
}
VULKAN_HPP_NAMESPACE::AccelerationStructureNV release()
{
m_device = nullptr;
m_allocator = nullptr;
m_dispatcher = nullptr;
return VULKAN_HPP_NAMESPACE::exchange( m_accelerationStructure, nullptr );
}
VULKAN_HPP_NAMESPACE::Device getDevice() const
{
return m_device;
}
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const
{
VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
return m_dispatcher;
}
void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::AccelerationStructureNV & rhs ) VULKAN_HPP_NOEXCEPT
{
std::swap( m_device, rhs.m_device );
std::swap( m_accelerationStructure, rhs.m_accelerationStructure );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
//=== VK_NV_ray_tracing ===
template <typename DataType>
VULKAN_HPP_NODISCARD std::vector<DataType> getHandle( size_t dataSize ) const;
template <typename DataType>
VULKAN_HPP_NODISCARD DataType getHandle() const;
private:
VULKAN_HPP_NAMESPACE::Device m_device = {};
VULKAN_HPP_NAMESPACE::AccelerationStructureNV m_accelerationStructure = {};
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr;
};
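    // Usage sketch (illustrative comment only, assuming the default vk / vk::raii namespace aliases): the
    // getHandle() overloads above wrap vkGetAccelerationStructureHandleNV, which returns the opaque handle
    // that VK_NV_ray_tracing instance records reference.
    //
    //   vk::raii::AccelerationStructureNV blas( device, createInfoNV );   // createInfoNV filled by the app
    //   uint64_t handle = blas.getHandle<uint64_t>();                     // stored in instance data for TLAS builds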
class Buffer
{
public:
using CType = VkBuffer;
using CppType = VULKAN_HPP_NAMESPACE::Buffer;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eBuffer;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBuffer;
public:
# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
Buffer( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VULKAN_HPP_NAMESPACE::BufferCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
{
*this = device.createBuffer( createInfo, allocator );
}
# endif
Buffer( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VkBuffer buffer,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_device( device )
, m_buffer( buffer )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
{
}
Buffer( std::nullptr_t ) {}
~Buffer()
{
clear();
}
Buffer() = delete;
Buffer( Buffer const & ) = delete;
Buffer( Buffer && rhs ) VULKAN_HPP_NOEXCEPT
: m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) )
, m_buffer( VULKAN_HPP_NAMESPACE::exchange( rhs.m_buffer, {} ) )
, m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
{
}
Buffer & operator=( Buffer const & ) = delete;
Buffer & operator=( Buffer && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
std::swap( m_device, rhs.m_device );
std::swap( m_buffer, rhs.m_buffer );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
return *this;
}
VULKAN_HPP_NAMESPACE::Buffer const & operator*() const VULKAN_HPP_NOEXCEPT
{
return m_buffer;
}
operator VULKAN_HPP_NAMESPACE::Buffer() const VULKAN_HPP_NOEXCEPT
{
return m_buffer;
}
void clear() VULKAN_HPP_NOEXCEPT
{
if ( m_buffer )
{
getDispatcher()->vkDestroyBuffer(
static_cast<VkDevice>( m_device ), static_cast<VkBuffer>( m_buffer ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
}
m_device = nullptr;
m_buffer = nullptr;
m_allocator = nullptr;
m_dispatcher = nullptr;
}
VULKAN_HPP_NAMESPACE::Buffer release()
{
m_device = nullptr;
m_allocator = nullptr;
m_dispatcher = nullptr;
return VULKAN_HPP_NAMESPACE::exchange( m_buffer, nullptr );
}
VULKAN_HPP_NAMESPACE::Device getDevice() const
{
return m_device;
}
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const
{
VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
return m_dispatcher;
}
void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Buffer & rhs ) VULKAN_HPP_NOEXCEPT
{
std::swap( m_device, rhs.m_device );
std::swap( m_buffer, rhs.m_buffer );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
//=== VK_VERSION_1_0 ===
void bindMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset ) const;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements getMemoryRequirements() const VULKAN_HPP_NOEXCEPT;
private:
VULKAN_HPP_NAMESPACE::Device m_device = {};
VULKAN_HPP_NAMESPACE::Buffer m_buffer = {};
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr;
};
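    // Usage sketch (illustrative comment only, assuming the default vk / vk::raii namespace aliases and an
    // application-provided raii::Device `device` and raii::DeviceMemory `memory`):
    //
    //   vk::raii::Buffer buffer( device, vk::BufferCreateInfo( {}, 65536, vk::BufferUsageFlagBits::eUniformBuffer ) );
    //   vk::MemoryRequirements reqs = buffer.getMemoryRequirements();   // wraps vkGetBufferMemoryRequirements
    //   buffer.bindMemory( *memory, 0 );                                // wraps vkBindBufferMemory
    //   vk::Buffer raw = buffer.release();                              // gives up ownership; destruction becomes the caller's job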
# if defined( VK_USE_PLATFORM_FUCHSIA )
class BufferCollectionFUCHSIA
{
public:
using CType = VkBufferCollectionFUCHSIA;
using CppType = VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eBufferCollectionFUCHSIA;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBufferCollectionFUCHSIA;
public:
# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
BufferCollectionFUCHSIA( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
{
*this = device.createBufferCollectionFUCHSIA( createInfo, allocator );
}
# endif
BufferCollectionFUCHSIA( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VkBufferCollectionFUCHSIA collection,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_device( device )
, m_collection( collection )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
{
}
BufferCollectionFUCHSIA( std::nullptr_t ) {}
~BufferCollectionFUCHSIA()
{
clear();
}
BufferCollectionFUCHSIA() = delete;
BufferCollectionFUCHSIA( BufferCollectionFUCHSIA const & ) = delete;
BufferCollectionFUCHSIA( BufferCollectionFUCHSIA && rhs ) VULKAN_HPP_NOEXCEPT
: m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) )
, m_collection( VULKAN_HPP_NAMESPACE::exchange( rhs.m_collection, {} ) )
, m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
{
}
BufferCollectionFUCHSIA & operator=( BufferCollectionFUCHSIA const & ) = delete;
BufferCollectionFUCHSIA & operator=( BufferCollectionFUCHSIA && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
std::swap( m_device, rhs.m_device );
std::swap( m_collection, rhs.m_collection );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
return *this;
}
VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA const & operator*() const VULKAN_HPP_NOEXCEPT
{
return m_collection;
}
operator VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA() const VULKAN_HPP_NOEXCEPT
{
return m_collection;
}
void clear() VULKAN_HPP_NOEXCEPT
{
if ( m_collection )
{
getDispatcher()->vkDestroyBufferCollectionFUCHSIA( static_cast<VkDevice>( m_device ),
static_cast<VkBufferCollectionFUCHSIA>( m_collection ),
reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
}
m_device = nullptr;
m_collection = nullptr;
m_allocator = nullptr;
m_dispatcher = nullptr;
}
VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA release()
{
m_device = nullptr;
m_allocator = nullptr;
m_dispatcher = nullptr;
return VULKAN_HPP_NAMESPACE::exchange( m_collection, nullptr );
}
VULKAN_HPP_NAMESPACE::Device getDevice() const
{
return m_device;
}
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const
{
VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
return m_dispatcher;
}
void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::BufferCollectionFUCHSIA & rhs ) VULKAN_HPP_NOEXCEPT
{
std::swap( m_device, rhs.m_device );
std::swap( m_collection, rhs.m_collection );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
//=== VK_FUCHSIA_buffer_collection ===
void setImageConstraints( const VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA & imageConstraintsInfo ) const;
void setBufferConstraints( const VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA & bufferConstraintsInfo ) const;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA getProperties() const;
private:
VULKAN_HPP_NAMESPACE::Device m_device = {};
VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA m_collection = {};
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr;
};
# endif /*VK_USE_PLATFORM_FUCHSIA*/
class BufferView
{
public:
using CType = VkBufferView;
using CppType = VULKAN_HPP_NAMESPACE::BufferView;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eBufferView;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBufferView;
public:
# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
BufferView( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VULKAN_HPP_NAMESPACE::BufferViewCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
{
*this = device.createBufferView( createInfo, allocator );
}
# endif
BufferView( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VkBufferView bufferView,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_device( device )
, m_bufferView( bufferView )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
{
}
BufferView( std::nullptr_t ) {}
~BufferView()
{
clear();
}
BufferView() = delete;
BufferView( BufferView const & ) = delete;
BufferView( BufferView && rhs ) VULKAN_HPP_NOEXCEPT
: m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) )
, m_bufferView( VULKAN_HPP_NAMESPACE::exchange( rhs.m_bufferView, {} ) )
, m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
{
}
BufferView & operator=( BufferView const & ) = delete;
BufferView & operator=( BufferView && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
std::swap( m_device, rhs.m_device );
std::swap( m_bufferView, rhs.m_bufferView );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
return *this;
}
VULKAN_HPP_NAMESPACE::BufferView const & operator*() const VULKAN_HPP_NOEXCEPT
{
return m_bufferView;
}
operator VULKAN_HPP_NAMESPACE::BufferView() const VULKAN_HPP_NOEXCEPT
{
return m_bufferView;
}
void clear() VULKAN_HPP_NOEXCEPT
{
if ( m_bufferView )
{
getDispatcher()->vkDestroyBufferView(
static_cast<VkDevice>( m_device ), static_cast<VkBufferView>( m_bufferView ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
}
m_device = nullptr;
m_bufferView = nullptr;
m_allocator = nullptr;
m_dispatcher = nullptr;
}
VULKAN_HPP_NAMESPACE::BufferView release()
{
m_device = nullptr;
m_allocator = nullptr;
m_dispatcher = nullptr;
return VULKAN_HPP_NAMESPACE::exchange( m_bufferView, nullptr );
}
VULKAN_HPP_NAMESPACE::Device getDevice() const
{
return m_device;
}
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const
{
VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
return m_dispatcher;
}
void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::BufferView & rhs ) VULKAN_HPP_NOEXCEPT
{
std::swap( m_device, rhs.m_device );
std::swap( m_bufferView, rhs.m_bufferView );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
private:
VULKAN_HPP_NAMESPACE::Device m_device = {};
VULKAN_HPP_NAMESPACE::BufferView m_bufferView = {};
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr;
};
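    // Usage sketch (illustrative comment only, assuming the default vk / vk::raii namespace aliases and an
    // existing raii::Buffer `buffer` created with a buffer-view-compatible usage flag):
    //
    //   vk::BufferViewCreateInfo viewInfo( {}, *buffer, vk::Format::eR32Sfloat, 0, VK_WHOLE_SIZE );
    //   vk::raii::BufferView view( device, viewInfo );   // destroyed via vkDestroyBufferView when `view` goes out of scope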
class CommandPool
{
public:
using CType = VkCommandPool;
using CppType = VULKAN_HPP_NAMESPACE::CommandPool;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eCommandPool;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCommandPool;
public:
# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
CommandPool( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
{
*this = device.createCommandPool( createInfo, allocator );
}
# endif
CommandPool( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VkCommandPool commandPool,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_device( device )
, m_commandPool( commandPool )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
{
}
CommandPool( std::nullptr_t ) {}
~CommandPool()
{
clear();
}
CommandPool() = delete;
CommandPool( CommandPool const & ) = delete;
CommandPool( CommandPool && rhs ) VULKAN_HPP_NOEXCEPT
: m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) )
, m_commandPool( VULKAN_HPP_NAMESPACE::exchange( rhs.m_commandPool, {} ) )
, m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
{
}
CommandPool & operator=( CommandPool const & ) = delete;
CommandPool & operator=( CommandPool && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
std::swap( m_device, rhs.m_device );
std::swap( m_commandPool, rhs.m_commandPool );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
return *this;
}
VULKAN_HPP_NAMESPACE::CommandPool const & operator*() const VULKAN_HPP_NOEXCEPT
{
return m_commandPool;
}
operator VULKAN_HPP_NAMESPACE::CommandPool() const VULKAN_HPP_NOEXCEPT
{
return m_commandPool;
}
void clear() VULKAN_HPP_NOEXCEPT
{
if ( m_commandPool )
{
getDispatcher()->vkDestroyCommandPool(
static_cast<VkDevice>( m_device ), static_cast<VkCommandPool>( m_commandPool ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
}
m_device = nullptr;
m_commandPool = nullptr;
m_allocator = nullptr;
m_dispatcher = nullptr;
}
VULKAN_HPP_NAMESPACE::CommandPool release()
{
m_device = nullptr;
m_allocator = nullptr;
m_dispatcher = nullptr;
return VULKAN_HPP_NAMESPACE::exchange( m_commandPool, nullptr );
}
VULKAN_HPP_NAMESPACE::Device getDevice() const
{
return m_device;
}
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const
{
VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
return m_dispatcher;
}
void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CommandPool & rhs ) VULKAN_HPP_NOEXCEPT
{
std::swap( m_device, rhs.m_device );
std::swap( m_commandPool, rhs.m_commandPool );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
//=== VK_VERSION_1_0 ===
void reset( VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const;
//=== VK_VERSION_1_1 ===
void trim( VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
//=== VK_KHR_maintenance1 ===
void trimKHR( VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
private:
VULKAN_HPP_NAMESPACE::Device m_device = {};
VULKAN_HPP_NAMESPACE::CommandPool m_commandPool = {};
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr;
};
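    // Usage sketch (illustrative comment only, assuming the default vk / vk::raii namespace aliases and a
    // queue family index chosen by the application):
    //
    //   vk::CommandPoolCreateInfo poolInfo( vk::CommandPoolCreateFlagBits::eResetCommandBuffer, queueFamilyIndex );
    //   vk::raii::CommandPool pool( device, poolInfo );
    //   pool.reset();   // wraps vkResetCommandPool; per-buffer reset also works with the flag chosen above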
class CommandBuffer
{
public:
using CType = VkCommandBuffer;
using CppType = VULKAN_HPP_NAMESPACE::CommandBuffer;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eCommandBuffer;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCommandBuffer;
public:
CommandBuffer( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VkCommandBuffer commandBuffer, VkCommandPool commandPool )
: m_device( device ), m_commandPool( commandPool ), m_commandBuffer( commandBuffer ), m_dispatcher( device.getDispatcher() )
{
}
CommandBuffer( std::nullptr_t ) {}
~CommandBuffer()
{
clear();
}
CommandBuffer() = delete;
CommandBuffer( CommandBuffer const & ) = delete;
CommandBuffer( CommandBuffer && rhs ) VULKAN_HPP_NOEXCEPT
: m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) )
, m_commandPool( VULKAN_HPP_NAMESPACE::exchange( rhs.m_commandPool, {} ) )
, m_commandBuffer( VULKAN_HPP_NAMESPACE::exchange( rhs.m_commandBuffer, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
{
}
CommandBuffer & operator=( CommandBuffer const & ) = delete;
CommandBuffer & operator=( CommandBuffer && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
std::swap( m_device, rhs.m_device );
std::swap( m_commandPool, rhs.m_commandPool );
std::swap( m_commandBuffer, rhs.m_commandBuffer );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
return *this;
}
VULKAN_HPP_NAMESPACE::CommandBuffer const & operator*() const VULKAN_HPP_NOEXCEPT
{
return m_commandBuffer;
}
operator VULKAN_HPP_NAMESPACE::CommandBuffer() const VULKAN_HPP_NOEXCEPT
{
return m_commandBuffer;
}
void clear() VULKAN_HPP_NOEXCEPT
{
if ( m_commandBuffer )
{
getDispatcher()->vkFreeCommandBuffers(
static_cast<VkDevice>( m_device ), static_cast<VkCommandPool>( m_commandPool ), 1, reinterpret_cast<VkCommandBuffer const *>( &m_commandBuffer ) );
}
m_device = nullptr;
m_commandPool = nullptr;
m_commandBuffer = nullptr;
m_dispatcher = nullptr;
}
VULKAN_HPP_NAMESPACE::CommandBuffer release()
{
m_device = nullptr;
m_commandPool = nullptr;
m_dispatcher = nullptr;
return VULKAN_HPP_NAMESPACE::exchange( m_commandBuffer, nullptr );
}
VULKAN_HPP_NAMESPACE::Device getDevice() const
{
return m_device;
}
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const
{
VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
return m_dispatcher;
}
void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CommandBuffer & rhs ) VULKAN_HPP_NOEXCEPT
{
std::swap( m_device, rhs.m_device );
std::swap( m_commandPool, rhs.m_commandPool );
std::swap( m_commandBuffer, rhs.m_commandBuffer );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
//=== VK_VERSION_1_0 ===
void begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo & beginInfo ) const;
void end() const;
void reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const;
void bindPipeline( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::Pipeline pipeline ) const VULKAN_HPP_NOEXCEPT;
void setViewport( uint32_t firstViewport,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports ) const VULKAN_HPP_NOEXCEPT;
void setScissor( uint32_t firstScissor, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors ) const VULKAN_HPP_NOEXCEPT;
void setLineWidth( float lineWidth ) const VULKAN_HPP_NOEXCEPT;
void setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor ) const VULKAN_HPP_NOEXCEPT;
void setBlendConstants( const float blendConstants[4] ) const VULKAN_HPP_NOEXCEPT;
void setDepthBounds( float minDepthBounds, float maxDepthBounds ) const VULKAN_HPP_NOEXCEPT;
void setStencilCompareMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t compareMask ) const VULKAN_HPP_NOEXCEPT;
void setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t writeMask ) const VULKAN_HPP_NOEXCEPT;
void setStencilReference( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t reference ) const VULKAN_HPP_NOEXCEPT;
void bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
VULKAN_HPP_NAMESPACE::PipelineLayout layout,
uint32_t firstSet,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets,
VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t> const & dynamicOffsets ) const VULKAN_HPP_NOEXCEPT;
void bindIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer,
VULKAN_HPP_NAMESPACE::DeviceSize offset,
VULKAN_HPP_NAMESPACE::IndexType indexType ) const VULKAN_HPP_NOEXCEPT;
void bindVertexBuffers( uint32_t firstBinding,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets ) const;
void draw( uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance ) const VULKAN_HPP_NOEXCEPT;
void
drawIndexed( uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance ) const VULKAN_HPP_NOEXCEPT;
void drawIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer,
VULKAN_HPP_NAMESPACE::DeviceSize offset,
uint32_t drawCount,
uint32_t stride ) const VULKAN_HPP_NOEXCEPT;
void drawIndexedIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer,
VULKAN_HPP_NAMESPACE::DeviceSize offset,
uint32_t drawCount,
uint32_t stride ) const VULKAN_HPP_NOEXCEPT;
void dispatch( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT;
void dispatchIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset ) const VULKAN_HPP_NOEXCEPT;
void copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer,
VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferCopy> const & regions ) const VULKAN_HPP_NOEXCEPT;
void copyImage( VULKAN_HPP_NAMESPACE::Image srcImage,
VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
VULKAN_HPP_NAMESPACE::Image dstImage,
VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageCopy> const & regions ) const VULKAN_HPP_NOEXCEPT;
void blitImage( VULKAN_HPP_NAMESPACE::Image srcImage,
VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
VULKAN_HPP_NAMESPACE::Image dstImage,
VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageBlit> const & regions,
VULKAN_HPP_NAMESPACE::Filter filter ) const VULKAN_HPP_NOEXCEPT;
void copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer,
VULKAN_HPP_NAMESPACE::Image dstImage,
VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const & regions ) const VULKAN_HPP_NOEXCEPT;
void copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage,
VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const & regions ) const VULKAN_HPP_NOEXCEPT;
template <typename DataType>
void updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,
VULKAN_HPP_NAMESPACE::ArrayProxy<const DataType> const & data ) const VULKAN_HPP_NOEXCEPT;
void fillBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,
VULKAN_HPP_NAMESPACE::DeviceSize size,
uint32_t data ) const VULKAN_HPP_NOEXCEPT;
void clearColorImage( VULKAN_HPP_NAMESPACE::Image image,
VULKAN_HPP_NAMESPACE::ImageLayout imageLayout,
const VULKAN_HPP_NAMESPACE::ClearColorValue & color,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> const & ranges ) const VULKAN_HPP_NOEXCEPT;
void
clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image,
VULKAN_HPP_NAMESPACE::ImageLayout imageLayout,
const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue & depthStencil,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> const & ranges ) const VULKAN_HPP_NOEXCEPT;
void clearAttachments( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearAttachment> const & attachments,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearRect> const & rects ) const VULKAN_HPP_NOEXCEPT;
void resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage,
VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
VULKAN_HPP_NAMESPACE::Image dstImage,
VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageResolve> const & regions ) const VULKAN_HPP_NOEXCEPT;
void setEvent( VULKAN_HPP_NAMESPACE::Event event,
VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
void resetEvent( VULKAN_HPP_NAMESPACE::Event event,
VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
void waitEvents( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events,
VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask,
VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> const & memoryBarriers,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> const & bufferMemoryBarriers,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> const & imageMemoryBarriers ) const VULKAN_HPP_NOEXCEPT;
void pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask,
VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask,
VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> const & memoryBarriers,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> const & bufferMemoryBarriers,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> const & imageMemoryBarriers ) const
VULKAN_HPP_NOEXCEPT;
void beginQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
uint32_t query,
VULKAN_HPP_NAMESPACE::QueryControlFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
void endQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query ) const VULKAN_HPP_NOEXCEPT;
void resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT;
void writeTimestamp( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage,
VULKAN_HPP_NAMESPACE::QueryPool queryPool,
uint32_t query ) const VULKAN_HPP_NOEXCEPT;
void copyQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
uint32_t firstQuery,
uint32_t queryCount,
VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,
VULKAN_HPP_NAMESPACE::DeviceSize stride,
VULKAN_HPP_NAMESPACE::QueryResultFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename ValuesType>
void pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout,
VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags,
uint32_t offset,
VULKAN_HPP_NAMESPACE::ArrayProxy<const ValuesType> const & values ) const VULKAN_HPP_NOEXCEPT;
void beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin,
VULKAN_HPP_NAMESPACE::SubpassContents contents ) const VULKAN_HPP_NOEXCEPT;
void nextSubpass( VULKAN_HPP_NAMESPACE::SubpassContents contents ) const VULKAN_HPP_NOEXCEPT;
void endRenderPass() const VULKAN_HPP_NOEXCEPT;
void executeCommands( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers ) const VULKAN_HPP_NOEXCEPT;
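    // Usage sketch for the core recording interface above (illustrative comment only, assuming the default
    // vk / vk::raii namespace aliases; `pool`, `pipeline` and `renderPassBegin` are application-provided):
    //
    //   vk::raii::CommandBuffers commandBuffers( device, { *pool, vk::CommandBufferLevel::ePrimary, 1 } );
    //   vk::raii::CommandBuffer & cmd = commandBuffers.front();
    //   cmd.begin( {} );
    //   cmd.beginRenderPass( renderPassBegin, vk::SubpassContents::eInline );
    //   cmd.bindPipeline( vk::PipelineBindPoint::eGraphics, *pipeline );
    //   cmd.draw( 3, 1, 0, 0 );
    //   cmd.endRenderPass();
    //   cmd.end();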
//=== VK_VERSION_1_1 ===
void setDeviceMask( uint32_t deviceMask ) const VULKAN_HPP_NOEXCEPT;
void dispatchBase( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const
VULKAN_HPP_NOEXCEPT;
//=== VK_VERSION_1_2 ===
void drawIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer,
VULKAN_HPP_NAMESPACE::DeviceSize offset,
VULKAN_HPP_NAMESPACE::Buffer countBuffer,
VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
uint32_t maxDrawCount,
uint32_t stride ) const VULKAN_HPP_NOEXCEPT;
void drawIndexedIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer,
VULKAN_HPP_NAMESPACE::DeviceSize offset,
VULKAN_HPP_NAMESPACE::Buffer countBuffer,
VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
uint32_t maxDrawCount,
uint32_t stride ) const VULKAN_HPP_NOEXCEPT;
void beginRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin,
const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo ) const VULKAN_HPP_NOEXCEPT;
void nextSubpass2( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo,
const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo ) const VULKAN_HPP_NOEXCEPT;
void endRenderPass2( const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo ) const VULKAN_HPP_NOEXCEPT;
//=== VK_VERSION_1_3 ===
void setEvent2( VULKAN_HPP_NAMESPACE::Event event, const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo ) const VULKAN_HPP_NOEXCEPT;
void resetEvent2( VULKAN_HPP_NAMESPACE::Event event,
VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
void waitEvents2( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DependencyInfo> const & dependencyInfos ) const;
void pipelineBarrier2( const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo ) const VULKAN_HPP_NOEXCEPT;
void
writeTimestamp2( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query ) const VULKAN_HPP_NOEXCEPT;
void copyBuffer2( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 & copyBufferInfo ) const VULKAN_HPP_NOEXCEPT;
void copyImage2( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 & copyImageInfo ) const VULKAN_HPP_NOEXCEPT;
void copyBufferToImage2( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 & copyBufferToImageInfo ) const VULKAN_HPP_NOEXCEPT;
void copyImageToBuffer2( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 & copyImageToBufferInfo ) const VULKAN_HPP_NOEXCEPT;
void blitImage2( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 & blitImageInfo ) const VULKAN_HPP_NOEXCEPT;
void resolveImage2( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 & resolveImageInfo ) const VULKAN_HPP_NOEXCEPT;
void beginRendering( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo ) const VULKAN_HPP_NOEXCEPT;
void endRendering() const VULKAN_HPP_NOEXCEPT;
void setCullMode( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
void setFrontFace( VULKAN_HPP_NAMESPACE::FrontFace frontFace ) const VULKAN_HPP_NOEXCEPT;
void setPrimitiveTopology( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology ) const VULKAN_HPP_NOEXCEPT;
void setViewportWithCount( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports ) const VULKAN_HPP_NOEXCEPT;
void setScissorWithCount( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors ) const VULKAN_HPP_NOEXCEPT;
void bindVertexBuffers2(
uint32_t firstBinding,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & strides VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const;
void setDepthTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable ) const VULKAN_HPP_NOEXCEPT;
void setDepthWriteEnable( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable ) const VULKAN_HPP_NOEXCEPT;
void setDepthCompareOp( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp ) const VULKAN_HPP_NOEXCEPT;
void setDepthBoundsTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable ) const VULKAN_HPP_NOEXCEPT;
void setStencilTestEnable( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable ) const VULKAN_HPP_NOEXCEPT;
void setStencilOp( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask,
VULKAN_HPP_NAMESPACE::StencilOp failOp,
VULKAN_HPP_NAMESPACE::StencilOp passOp,
VULKAN_HPP_NAMESPACE::StencilOp depthFailOp,
VULKAN_HPP_NAMESPACE::CompareOp compareOp ) const VULKAN_HPP_NOEXCEPT;
void setRasterizerDiscardEnable( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable ) const VULKAN_HPP_NOEXCEPT;
void setDepthBiasEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable ) const VULKAN_HPP_NOEXCEPT;
void setPrimitiveRestartEnable( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable ) const VULKAN_HPP_NOEXCEPT;
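    // Usage sketch for the Vulkan 1.3 dynamic-rendering and dynamic-state commands above (illustrative
    // comment only; `renderArea` and `colorAttachment` are application-filled, `cmd` is a recording command buffer):
    //
    //   cmd.beginRendering( vk::RenderingInfo( {}, renderArea, 1, 0, colorAttachment ) );
    //   cmd.setCullMode( vk::CullModeFlagBits::eBack );
    //   cmd.setFrontFace( vk::FrontFace::eCounterClockwise );
    //   cmd.endRendering();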
//=== VK_EXT_debug_marker ===
void debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT & markerInfo ) const VULKAN_HPP_NOEXCEPT;
void debugMarkerEndEXT() const VULKAN_HPP_NOEXCEPT;
void debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT & markerInfo ) const VULKAN_HPP_NOEXCEPT;
//=== VK_KHR_video_queue ===
void beginVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR & beginInfo ) const VULKAN_HPP_NOEXCEPT;
void endVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR & endCodingInfo ) const VULKAN_HPP_NOEXCEPT;
void controlVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR & codingControlInfo ) const VULKAN_HPP_NOEXCEPT;
//=== VK_KHR_video_decode_queue ===
void decodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR & decodeInfo ) const VULKAN_HPP_NOEXCEPT;
//=== VK_EXT_transform_feedback ===
void bindTransformFeedbackBuffersEXT( uint32_t firstBinding,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes
VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const;
void beginTransformFeedbackEXT( uint32_t firstCounterBuffer,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & counterBuffers,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & counterBufferOffsets
VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const;
void endTransformFeedbackEXT( uint32_t firstCounterBuffer,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & counterBuffers,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & counterBufferOffsets
VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const;
void beginQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
uint32_t query,
VULKAN_HPP_NAMESPACE::QueryControlFlags flags,
uint32_t index ) const VULKAN_HPP_NOEXCEPT;
void endQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, uint32_t index ) const VULKAN_HPP_NOEXCEPT;
void drawIndirectByteCountEXT( uint32_t instanceCount,
uint32_t firstInstance,
VULKAN_HPP_NAMESPACE::Buffer counterBuffer,
VULKAN_HPP_NAMESPACE::DeviceSize counterBufferOffset,
uint32_t counterOffset,
uint32_t vertexStride ) const VULKAN_HPP_NOEXCEPT;
//=== VK_NVX_binary_import ===
void cuLaunchKernelNVX( const VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX & launchInfo ) const VULKAN_HPP_NOEXCEPT;
//=== VK_AMD_draw_indirect_count ===
void drawIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer,
VULKAN_HPP_NAMESPACE::DeviceSize offset,
VULKAN_HPP_NAMESPACE::Buffer countBuffer,
VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
uint32_t maxDrawCount,
uint32_t stride ) const VULKAN_HPP_NOEXCEPT;
void drawIndexedIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer,
VULKAN_HPP_NAMESPACE::DeviceSize offset,
VULKAN_HPP_NAMESPACE::Buffer countBuffer,
VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
uint32_t maxDrawCount,
uint32_t stride ) const VULKAN_HPP_NOEXCEPT;
//=== VK_KHR_dynamic_rendering ===
void beginRenderingKHR( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo ) const VULKAN_HPP_NOEXCEPT;
void endRenderingKHR() const VULKAN_HPP_NOEXCEPT;
//=== VK_KHR_device_group ===
void setDeviceMaskKHR( uint32_t deviceMask ) const VULKAN_HPP_NOEXCEPT;
void dispatchBaseKHR( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ )
const VULKAN_HPP_NOEXCEPT;
//=== VK_KHR_push_descriptor ===
void pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
VULKAN_HPP_NAMESPACE::PipelineLayout layout,
uint32_t set,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const & descriptorWrites ) const
VULKAN_HPP_NOEXCEPT;
template <typename DataType>
void pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
VULKAN_HPP_NAMESPACE::PipelineLayout layout,
uint32_t set,
DataType const & data ) const VULKAN_HPP_NOEXCEPT;
//=== VK_EXT_conditional_rendering ===
void beginConditionalRenderingEXT( const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT & conditionalRenderingBegin ) const VULKAN_HPP_NOEXCEPT;
void endConditionalRenderingEXT() const VULKAN_HPP_NOEXCEPT;
//=== VK_NV_clip_space_w_scaling ===
void setViewportWScalingNV( uint32_t firstViewport,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ViewportWScalingNV> const & viewportWScalings ) const
VULKAN_HPP_NOEXCEPT;
//=== VK_EXT_discard_rectangles ===
void setDiscardRectangleEXT( uint32_t firstDiscardRectangle,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & discardRectangles ) const VULKAN_HPP_NOEXCEPT;
void setDiscardRectangleEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 discardRectangleEnable ) const VULKAN_HPP_NOEXCEPT;
void setDiscardRectangleModeEXT( VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode ) const VULKAN_HPP_NOEXCEPT;
//=== VK_KHR_create_renderpass2 ===
void beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin,
const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo ) const VULKAN_HPP_NOEXCEPT;
void nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo,
const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo ) const VULKAN_HPP_NOEXCEPT;
void endRenderPass2KHR( const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo ) const VULKAN_HPP_NOEXCEPT;
//=== VK_EXT_debug_utils ===
void beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo ) const VULKAN_HPP_NOEXCEPT;
void endDebugUtilsLabelEXT() const VULKAN_HPP_NOEXCEPT;
void insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo ) const VULKAN_HPP_NOEXCEPT;
# if defined( VK_ENABLE_BETA_EXTENSIONS )
//=== VK_AMDX_shader_enqueue ===
void initializeGraphScratchMemoryAMDX( VULKAN_HPP_NAMESPACE::Pipeline executionGraph,
VULKAN_HPP_NAMESPACE::DeviceAddress scratch,
VULKAN_HPP_NAMESPACE::DeviceSize scratchSize ) const VULKAN_HPP_NOEXCEPT;
void dispatchGraphAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch,
VULKAN_HPP_NAMESPACE::DeviceSize scratchSize,
const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX & countInfo ) const VULKAN_HPP_NOEXCEPT;
void dispatchGraphIndirectAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch,
VULKAN_HPP_NAMESPACE::DeviceSize scratchSize,
const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX & countInfo ) const VULKAN_HPP_NOEXCEPT;
void dispatchGraphIndirectCountAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch,
VULKAN_HPP_NAMESPACE::DeviceSize scratchSize,
VULKAN_HPP_NAMESPACE::DeviceAddress countInfo ) const VULKAN_HPP_NOEXCEPT;
# endif /*VK_ENABLE_BETA_EXTENSIONS*/
//=== VK_EXT_sample_locations ===
void setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT & sampleLocationsInfo ) const VULKAN_HPP_NOEXCEPT;
//=== VK_KHR_acceleration_structure ===
void buildAccelerationStructuresKHR(
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const> const & pBuildRangeInfos ) const;
void buildAccelerationStructuresIndirectKHR(
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceAddress> const & indirectDeviceAddresses,
VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t> const & indirectStrides,
VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t * const> const & pMaxPrimitiveCounts ) const;
void copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR & info ) const VULKAN_HPP_NOEXCEPT;
void copyAccelerationStructureToMemoryKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR & info ) const VULKAN_HPP_NOEXCEPT;
void copyMemoryToAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR & info ) const VULKAN_HPP_NOEXCEPT;
void writeAccelerationStructuresPropertiesKHR(
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures,
VULKAN_HPP_NAMESPACE::QueryType queryType,
VULKAN_HPP_NAMESPACE::QueryPool queryPool,
uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT;
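    // Usage sketch for the VK_KHR_acceleration_structure commands above (illustrative comment only;
    // `buildInfo` and `rangeInfo` are application-filled structures, `cmd` is a recording command buffer):
    //
    //   const vk::AccelerationStructureBuildRangeInfoKHR * pRangeInfo = &rangeInfo;
    //   cmd.buildAccelerationStructuresKHR( buildInfo, pRangeInfo );   // one geometry info paired with one range-info pointer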
//=== VK_KHR_ray_tracing_pipeline ===
void traceRaysKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & raygenShaderBindingTable,
const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & missShaderBindingTable,
const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & hitShaderBindingTable,
const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & callableShaderBindingTable,
uint32_t width,
uint32_t height,
uint32_t depth ) const VULKAN_HPP_NOEXCEPT;
void traceRaysIndirectKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & raygenShaderBindingTable,
const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & missShaderBindingTable,
const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & hitShaderBindingTable,
const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & callableShaderBindingTable,
VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress ) const VULKAN_HPP_NOEXCEPT;
void setRayTracingPipelineStackSizeKHR( uint32_t pipelineStackSize ) const VULKAN_HPP_NOEXCEPT;
//=== VK_NV_shading_rate_image ===
void bindShadingRateImageNV( VULKAN_HPP_NAMESPACE::ImageView imageView, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout ) const VULKAN_HPP_NOEXCEPT;
void setViewportShadingRatePaletteNV(
uint32_t firstViewport,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV> const & shadingRatePalettes ) const VULKAN_HPP_NOEXCEPT;
void setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV> const & customSampleOrders ) const
VULKAN_HPP_NOEXCEPT;
//=== VK_NV_ray_tracing ===
void buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV & info,
VULKAN_HPP_NAMESPACE::Buffer instanceData,
VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset,
VULKAN_HPP_NAMESPACE::Bool32 update,
VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst,
VULKAN_HPP_NAMESPACE::AccelerationStructureNV src,
VULKAN_HPP_NAMESPACE::Buffer scratch,
VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset ) const VULKAN_HPP_NOEXCEPT;
void copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst,
VULKAN_HPP_NAMESPACE::AccelerationStructureNV src,
VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode ) const VULKAN_HPP_NOEXCEPT;
void traceRaysNV( VULKAN_HPP_NAMESPACE::Buffer raygenShaderBindingTableBuffer,
VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderBindingOffset,
VULKAN_HPP_NAMESPACE::Buffer missShaderBindingTableBuffer,
VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingOffset,
VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingStride,
VULKAN_HPP_NAMESPACE::Buffer hitShaderBindingTableBuffer,
VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingOffset,
VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingStride,
VULKAN_HPP_NAMESPACE::Buffer callableShaderBindingTableBuffer,
VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingOffset,
VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingStride,
uint32_t width,
uint32_t height,
uint32_t depth ) const VULKAN_HPP_NOEXCEPT;
void writeAccelerationStructuresPropertiesNV(
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureNV> const & accelerationStructures,
VULKAN_HPP_NAMESPACE::QueryType queryType,
VULKAN_HPP_NAMESPACE::QueryPool queryPool,
uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT;
//=== VK_KHR_draw_indirect_count ===
void drawIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer,
VULKAN_HPP_NAMESPACE::DeviceSize offset,
VULKAN_HPP_NAMESPACE::Buffer countBuffer,
VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
uint32_t maxDrawCount,
uint32_t stride ) const VULKAN_HPP_NOEXCEPT;
void drawIndexedIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer,
VULKAN_HPP_NAMESPACE::DeviceSize offset,
VULKAN_HPP_NAMESPACE::Buffer countBuffer,
VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
uint32_t maxDrawCount,
uint32_t stride ) const VULKAN_HPP_NOEXCEPT;
//=== VK_AMD_buffer_marker ===
void writeBufferMarkerAMD( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage,
VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,
uint32_t marker ) const VULKAN_HPP_NOEXCEPT;
void writeBufferMarker2AMD( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage,
VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,
uint32_t marker ) const VULKAN_HPP_NOEXCEPT;
//=== VK_NV_mesh_shader ===
void drawMeshTasksNV( uint32_t taskCount, uint32_t firstTask ) const VULKAN_HPP_NOEXCEPT;
void drawMeshTasksIndirectNV( VULKAN_HPP_NAMESPACE::Buffer buffer,
VULKAN_HPP_NAMESPACE::DeviceSize offset,
uint32_t drawCount,
uint32_t stride ) const VULKAN_HPP_NOEXCEPT;
void drawMeshTasksIndirectCountNV( VULKAN_HPP_NAMESPACE::Buffer buffer,
VULKAN_HPP_NAMESPACE::DeviceSize offset,
VULKAN_HPP_NAMESPACE::Buffer countBuffer,
VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
uint32_t maxDrawCount,
uint32_t stride ) const VULKAN_HPP_NOEXCEPT;
//=== VK_NV_scissor_exclusive ===
void setExclusiveScissorEnableNV( uint32_t firstExclusiveScissor,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Bool32> const & exclusiveScissorEnables ) const
VULKAN_HPP_NOEXCEPT;
void setExclusiveScissorNV( uint32_t firstExclusiveScissor,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & exclusiveScissors ) const VULKAN_HPP_NOEXCEPT;
//=== VK_NV_device_diagnostic_checkpoints ===
template <typename CheckpointMarkerType>
void setCheckpointNV( CheckpointMarkerType const & checkpointMarker ) const VULKAN_HPP_NOEXCEPT;
//=== VK_INTEL_performance_query ===
void setPerformanceMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL & markerInfo ) const;
void setPerformanceStreamMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL & markerInfo ) const;
void setPerformanceOverrideINTEL( const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL & overrideInfo ) const;
//=== VK_KHR_fragment_shading_rate ===
void setFragmentShadingRateKHR( const VULKAN_HPP_NAMESPACE::Extent2D & fragmentSize,
const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2] ) const VULKAN_HPP_NOEXCEPT;
//=== VK_KHR_dynamic_rendering_local_read ===
void setRenderingAttachmentLocationsKHR( const VULKAN_HPP_NAMESPACE::RenderingAttachmentLocationInfoKHR & locationInfo ) const VULKAN_HPP_NOEXCEPT;
void setRenderingInputAttachmentIndicesKHR( const VULKAN_HPP_NAMESPACE::RenderingInputAttachmentIndexInfoKHR & inputAttachmentIndexInfo ) const
VULKAN_HPP_NOEXCEPT;
//=== VK_EXT_line_rasterization ===
void setLineStippleEXT( uint32_t lineStippleFactor, uint16_t lineStipplePattern ) const VULKAN_HPP_NOEXCEPT;
//=== VK_EXT_extended_dynamic_state ===
void setCullModeEXT( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
void setFrontFaceEXT( VULKAN_HPP_NAMESPACE::FrontFace frontFace ) const VULKAN_HPP_NOEXCEPT;
void setPrimitiveTopologyEXT( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology ) const VULKAN_HPP_NOEXCEPT;
void setViewportWithCountEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports ) const VULKAN_HPP_NOEXCEPT;
void setScissorWithCountEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors ) const VULKAN_HPP_NOEXCEPT;
void bindVertexBuffers2EXT(
uint32_t firstBinding,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & strides VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const;
void setDepthTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable ) const VULKAN_HPP_NOEXCEPT;
void setDepthWriteEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable ) const VULKAN_HPP_NOEXCEPT;
void setDepthCompareOpEXT( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp ) const VULKAN_HPP_NOEXCEPT;
void setDepthBoundsTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable ) const VULKAN_HPP_NOEXCEPT;
void setStencilTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable ) const VULKAN_HPP_NOEXCEPT;
void setStencilOpEXT( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask,
VULKAN_HPP_NAMESPACE::StencilOp failOp,
VULKAN_HPP_NAMESPACE::StencilOp passOp,
VULKAN_HPP_NAMESPACE::StencilOp depthFailOp,
VULKAN_HPP_NAMESPACE::CompareOp compareOp ) const VULKAN_HPP_NOEXCEPT;
//=== VK_NV_device_generated_commands ===
void preprocessGeneratedCommandsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV & generatedCommandsInfo ) const VULKAN_HPP_NOEXCEPT;
void executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed,
const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV & generatedCommandsInfo ) const VULKAN_HPP_NOEXCEPT;
void bindPipelineShaderGroupNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
VULKAN_HPP_NAMESPACE::Pipeline pipeline,
uint32_t groupIndex ) const VULKAN_HPP_NOEXCEPT;
//=== VK_EXT_depth_bias_control ===
void setDepthBias2EXT( const VULKAN_HPP_NAMESPACE::DepthBiasInfoEXT & depthBiasInfo ) const VULKAN_HPP_NOEXCEPT;
//=== VK_KHR_video_encode_queue ===
void encodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR & encodeInfo ) const VULKAN_HPP_NOEXCEPT;
# if defined( VK_ENABLE_BETA_EXTENSIONS )
//=== VK_NV_cuda_kernel_launch ===
void cudaLaunchKernelNV( const VULKAN_HPP_NAMESPACE::CudaLaunchInfoNV & launchInfo ) const VULKAN_HPP_NOEXCEPT;
# endif /*VK_ENABLE_BETA_EXTENSIONS*/
//=== VK_KHR_synchronization2 ===
void setEvent2KHR( VULKAN_HPP_NAMESPACE::Event event, const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo ) const VULKAN_HPP_NOEXCEPT;
void resetEvent2KHR( VULKAN_HPP_NAMESPACE::Event event,
VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
void waitEvents2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DependencyInfo> const & dependencyInfos ) const;
void pipelineBarrier2KHR( const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo ) const VULKAN_HPP_NOEXCEPT;
void writeTimestamp2KHR( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage,
VULKAN_HPP_NAMESPACE::QueryPool queryPool,
uint32_t query ) const VULKAN_HPP_NOEXCEPT;
//=== VK_EXT_descriptor_buffer ===
void bindDescriptorBuffersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorBufferBindingInfoEXT> const & bindingInfos ) const
VULKAN_HPP_NOEXCEPT;
void setDescriptorBufferOffsetsEXT( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
VULKAN_HPP_NAMESPACE::PipelineLayout layout,
uint32_t firstSet,
VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t> const & bufferIndices,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets ) const;
void bindDescriptorBufferEmbeddedSamplersEXT( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
VULKAN_HPP_NAMESPACE::PipelineLayout layout,
uint32_t set ) const VULKAN_HPP_NOEXCEPT;
//=== VK_NV_fragment_shading_rate_enums ===
void setFragmentShadingRateEnumNV( VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate,
const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2] ) const VULKAN_HPP_NOEXCEPT;
//=== VK_EXT_mesh_shader ===
void drawMeshTasksEXT( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT;
void drawMeshTasksIndirectEXT( VULKAN_HPP_NAMESPACE::Buffer buffer,
VULKAN_HPP_NAMESPACE::DeviceSize offset,
uint32_t drawCount,
uint32_t stride ) const VULKAN_HPP_NOEXCEPT;
void drawMeshTasksIndirectCountEXT( VULKAN_HPP_NAMESPACE::Buffer buffer,
VULKAN_HPP_NAMESPACE::DeviceSize offset,
VULKAN_HPP_NAMESPACE::Buffer countBuffer,
VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
uint32_t maxDrawCount,
uint32_t stride ) const VULKAN_HPP_NOEXCEPT;
//=== VK_KHR_copy_commands2 ===
void copyBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 & copyBufferInfo ) const VULKAN_HPP_NOEXCEPT;
void copyImage2KHR( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 & copyImageInfo ) const VULKAN_HPP_NOEXCEPT;
void copyBufferToImage2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 & copyBufferToImageInfo ) const VULKAN_HPP_NOEXCEPT;
void copyImageToBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 & copyImageToBufferInfo ) const VULKAN_HPP_NOEXCEPT;
void blitImage2KHR( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 & blitImageInfo ) const VULKAN_HPP_NOEXCEPT;
void resolveImage2KHR( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 & resolveImageInfo ) const VULKAN_HPP_NOEXCEPT;
//=== VK_EXT_vertex_input_dynamic_state ===
void setVertexInputEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT> const & vertexBindingDescriptions,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT> const &
vertexAttributeDescriptions ) const VULKAN_HPP_NOEXCEPT;
//=== VK_HUAWEI_subpass_shading ===
void subpassShadingHUAWEI() const VULKAN_HPP_NOEXCEPT;
//=== VK_HUAWEI_invocation_mask ===
void bindInvocationMaskHUAWEI( VULKAN_HPP_NAMESPACE::ImageView imageView, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout ) const VULKAN_HPP_NOEXCEPT;
//=== VK_EXT_extended_dynamic_state2 ===
void setPatchControlPointsEXT( uint32_t patchControlPoints ) const VULKAN_HPP_NOEXCEPT;
void setRasterizerDiscardEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable ) const VULKAN_HPP_NOEXCEPT;
void setDepthBiasEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable ) const VULKAN_HPP_NOEXCEPT;
void setLogicOpEXT( VULKAN_HPP_NAMESPACE::LogicOp logicOp ) const VULKAN_HPP_NOEXCEPT;
void setPrimitiveRestartEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable ) const VULKAN_HPP_NOEXCEPT;
//=== VK_EXT_color_write_enable ===
void setColorWriteEnableEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Bool32> const & colorWriteEnables ) const VULKAN_HPP_NOEXCEPT;
//=== VK_KHR_ray_tracing_maintenance1 ===
void traceRaysIndirect2KHR( VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress ) const VULKAN_HPP_NOEXCEPT;
//=== VK_EXT_multi_draw ===
void drawMultiEXT( VULKAN_HPP_NAMESPACE::StridedArrayProxy<const VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT> const & vertexInfo,
uint32_t instanceCount,
uint32_t firstInstance ) const VULKAN_HPP_NOEXCEPT;
void drawMultiIndexedEXT( VULKAN_HPP_NAMESPACE::StridedArrayProxy<const VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT> const & indexInfo,
uint32_t instanceCount,
uint32_t firstInstance,
Optional<const int32_t> vertexOffset VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
//=== VK_EXT_opacity_micromap ===
void buildMicromapsEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT> const & infos ) const VULKAN_HPP_NOEXCEPT;
void copyMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT & info ) const VULKAN_HPP_NOEXCEPT;
void copyMicromapToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT & info ) const VULKAN_HPP_NOEXCEPT;
void copyMemoryToMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT & info ) const VULKAN_HPP_NOEXCEPT;
void writeMicromapsPropertiesEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapEXT> const & micromaps,
VULKAN_HPP_NAMESPACE::QueryType queryType,
VULKAN_HPP_NAMESPACE::QueryPool queryPool,
uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT;
//=== VK_HUAWEI_cluster_culling_shader ===
void drawClusterHUAWEI( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT;
void drawClusterIndirectHUAWEI( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset ) const VULKAN_HPP_NOEXCEPT;
//=== VK_NV_copy_memory_indirect ===
void copyMemoryIndirectNV( VULKAN_HPP_NAMESPACE::DeviceAddress copyBufferAddress, uint32_t copyCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT;
void copyMemoryToImageIndirectNV( VULKAN_HPP_NAMESPACE::DeviceAddress copyBufferAddress,
uint32_t stride,
VULKAN_HPP_NAMESPACE::Image dstImage,
VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceLayers> const & imageSubresources ) const
VULKAN_HPP_NOEXCEPT;
//=== VK_NV_memory_decompression ===
void decompressMemoryNV( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DecompressMemoryRegionNV> const & decompressMemoryRegions ) const
VULKAN_HPP_NOEXCEPT;
void decompressMemoryIndirectCountNV( VULKAN_HPP_NAMESPACE::DeviceAddress indirectCommandsAddress,
VULKAN_HPP_NAMESPACE::DeviceAddress indirectCommandsCountAddress,
uint32_t stride ) const VULKAN_HPP_NOEXCEPT;
//=== VK_NV_device_generated_commands_compute ===
void updatePipelineIndirectBufferNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
VULKAN_HPP_NAMESPACE::Pipeline pipeline ) const VULKAN_HPP_NOEXCEPT;
//=== VK_EXT_extended_dynamic_state3 ===
void setDepthClampEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable ) const VULKAN_HPP_NOEXCEPT;
void setPolygonModeEXT( VULKAN_HPP_NAMESPACE::PolygonMode polygonMode ) const VULKAN_HPP_NOEXCEPT;
void setRasterizationSamplesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples ) const VULKAN_HPP_NOEXCEPT;
void setSampleMaskEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SampleMask> const & sampleMask ) const;
void setAlphaToCoverageEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable ) const VULKAN_HPP_NOEXCEPT;
void setAlphaToOneEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable ) const VULKAN_HPP_NOEXCEPT;
void setLogicOpEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable ) const VULKAN_HPP_NOEXCEPT;
void setColorBlendEnableEXT( uint32_t firstAttachment,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Bool32> const & colorBlendEnables ) const VULKAN_HPP_NOEXCEPT;
void setColorBlendEquationEXT( uint32_t firstAttachment,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ColorBlendEquationEXT> const & colorBlendEquations ) const
VULKAN_HPP_NOEXCEPT;
void setColorWriteMaskEXT( uint32_t firstAttachment,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ColorComponentFlags> const & colorWriteMasks ) const
VULKAN_HPP_NOEXCEPT;
void setTessellationDomainOriginEXT( VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin ) const VULKAN_HPP_NOEXCEPT;
void setRasterizationStreamEXT( uint32_t rasterizationStream ) const VULKAN_HPP_NOEXCEPT;
void
setConservativeRasterizationModeEXT( VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode ) const VULKAN_HPP_NOEXCEPT;
void setExtraPrimitiveOverestimationSizeEXT( float extraPrimitiveOverestimationSize ) const VULKAN_HPP_NOEXCEPT;
void setDepthClipEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable ) const VULKAN_HPP_NOEXCEPT;
void setSampleLocationsEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable ) const VULKAN_HPP_NOEXCEPT;
void setColorBlendAdvancedEXT( uint32_t firstAttachment,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ColorBlendAdvancedEXT> const & colorBlendAdvanced ) const
VULKAN_HPP_NOEXCEPT;
void setProvokingVertexModeEXT( VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT provokingVertexMode ) const VULKAN_HPP_NOEXCEPT;
void setLineRasterizationModeEXT( VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode ) const VULKAN_HPP_NOEXCEPT;
void setLineStippleEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable ) const VULKAN_HPP_NOEXCEPT;
void setDepthClipNegativeOneToOneEXT( VULKAN_HPP_NAMESPACE::Bool32 negativeOneToOne ) const VULKAN_HPP_NOEXCEPT;
void setViewportWScalingEnableNV( VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable ) const VULKAN_HPP_NOEXCEPT;
void setViewportSwizzleNV( uint32_t firstViewport,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV> const & viewportSwizzles ) const
VULKAN_HPP_NOEXCEPT;
void setCoverageToColorEnableNV( VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable ) const VULKAN_HPP_NOEXCEPT;
void setCoverageToColorLocationNV( uint32_t coverageToColorLocation ) const VULKAN_HPP_NOEXCEPT;
void setCoverageModulationModeNV( VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode ) const VULKAN_HPP_NOEXCEPT;
void setCoverageModulationTableEnableNV( VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable ) const VULKAN_HPP_NOEXCEPT;
void setCoverageModulationTableNV( VULKAN_HPP_NAMESPACE::ArrayProxy<const float> const & coverageModulationTable ) const VULKAN_HPP_NOEXCEPT;
void setShadingRateImageEnableNV( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable ) const VULKAN_HPP_NOEXCEPT;
void setRepresentativeFragmentTestEnableNV( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable ) const VULKAN_HPP_NOEXCEPT;
void setCoverageReductionModeNV( VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode ) const VULKAN_HPP_NOEXCEPT;
//=== VK_NV_optical_flow ===
void opticalFlowExecuteNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session,
const VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV & executeInfo ) const VULKAN_HPP_NOEXCEPT;
//=== VK_KHR_maintenance5 ===
void bindIndexBuffer2KHR( VULKAN_HPP_NAMESPACE::Buffer buffer,
VULKAN_HPP_NAMESPACE::DeviceSize offset,
VULKAN_HPP_NAMESPACE::DeviceSize size,
VULKAN_HPP_NAMESPACE::IndexType indexType ) const VULKAN_HPP_NOEXCEPT;
//=== VK_EXT_shader_object ===
void bindShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderStageFlagBits> const & stages,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderEXT> const & shaders ) const;
void setDepthClampRangeEXT( VULKAN_HPP_NAMESPACE::DepthClampModeEXT depthClampMode,
Optional<const VULKAN_HPP_NAMESPACE::DepthClampRangeEXT> depthClampRange
VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
//=== VK_EXT_attachment_feedback_loop_dynamic_state ===
void setAttachmentFeedbackLoopEnableEXT( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const
VULKAN_HPP_NOEXCEPT;
//=== VK_KHR_line_rasterization ===
void setLineStippleKHR( uint32_t lineStippleFactor, uint16_t lineStipplePattern ) const VULKAN_HPP_NOEXCEPT;
//=== VK_KHR_maintenance6 ===
void bindDescriptorSets2KHR( const VULKAN_HPP_NAMESPACE::BindDescriptorSetsInfoKHR & bindDescriptorSetsInfo ) const VULKAN_HPP_NOEXCEPT;
void pushConstants2KHR( const VULKAN_HPP_NAMESPACE::PushConstantsInfoKHR & pushConstantsInfo ) const VULKAN_HPP_NOEXCEPT;
void pushDescriptorSet2KHR( const VULKAN_HPP_NAMESPACE::PushDescriptorSetInfoKHR & pushDescriptorSetInfo ) const VULKAN_HPP_NOEXCEPT;
void pushDescriptorSetWithTemplate2KHR( const VULKAN_HPP_NAMESPACE::PushDescriptorSetWithTemplateInfoKHR & pushDescriptorSetWithTemplateInfo ) const
VULKAN_HPP_NOEXCEPT;
void setDescriptorBufferOffsets2EXT( const VULKAN_HPP_NAMESPACE::SetDescriptorBufferOffsetsInfoEXT & setDescriptorBufferOffsetsInfo ) const
VULKAN_HPP_NOEXCEPT;
void bindDescriptorBufferEmbeddedSamplers2EXT(
const VULKAN_HPP_NAMESPACE::BindDescriptorBufferEmbeddedSamplersInfoEXT & bindDescriptorBufferEmbeddedSamplersInfo ) const VULKAN_HPP_NOEXCEPT;
//=== VK_EXT_device_generated_commands ===
void preprocessGeneratedCommandsEXT( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoEXT & generatedCommandsInfo,
VULKAN_HPP_NAMESPACE::CommandBuffer stateCommandBuffer ) const VULKAN_HPP_NOEXCEPT;
void executeGeneratedCommandsEXT( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed,
const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoEXT & generatedCommandsInfo ) const VULKAN_HPP_NOEXCEPT;
private:
VULKAN_HPP_NAMESPACE::Device m_device = {};
VULKAN_HPP_NAMESPACE::CommandPool m_commandPool = {};
VULKAN_HPP_NAMESPACE::CommandBuffer m_commandBuffer = {};
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr;
};
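    // Usage sketch (not generated; assumes the default vk / vk::raii namespace macros): recording a
    // few of the commands declared above through a RAII command buffer. `commandBuffer` is an assumed
    // vk::raii::CommandBuffer whose device has the relevant extensions enabled
    // (e.g. VK_EXT_extended_dynamic_state, VK_EXT_mesh_shader).
    //
    //   commandBuffer.begin( vk::CommandBufferBeginInfo{} );
    //   commandBuffer.setDepthTestEnableEXT( VK_TRUE );
    //   commandBuffer.setStencilTestEnableEXT( VK_FALSE );
    //   commandBuffer.drawMeshTasksEXT( 8, 1, 1 );
    //   commandBuffer.end();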
class CommandBuffers : public std::vector<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CommandBuffer>
{
public:
# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
CommandBuffers( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo const & allocateInfo )
{
*this = device.allocateCommandBuffers( allocateInfo );
}
# endif
CommandBuffers( std::nullptr_t ) {}
CommandBuffers() = delete;
CommandBuffers( CommandBuffers const & ) = delete;
CommandBuffers( CommandBuffers && rhs ) = default;
CommandBuffers & operator=( CommandBuffers const & ) = delete;
CommandBuffers & operator=( CommandBuffers && rhs ) = default;
private:
CommandBuffers( std::vector<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CommandBuffer> && rhs )
{
std::swap( *this, rhs );
}
};
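    // Usage sketch (not generated): allocating a batch of RAII command buffers. `device` and
    // `commandPool` are assumed to be valid vk::raii::Device / vk::raii::CommandPool objects; the
    // container behaves like a std::vector<vk::raii::CommandBuffer>.
    //
    //   vk::CommandBufferAllocateInfo allocateInfo( *commandPool, vk::CommandBufferLevel::ePrimary, 2 );
    //   vk::raii::CommandBuffers commandBuffers( device, allocateInfo );
    //   commandBuffers[0].begin( vk::CommandBufferBeginInfo{} );
    //   commandBuffers[0].end();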
class CuFunctionNVX
{
public:
using CType = VkCuFunctionNVX;
using CppType = VULKAN_HPP_NAMESPACE::CuFunctionNVX;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eCuFunctionNVX;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCuFunctionNVX;
public:
# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
CuFunctionNVX( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
{
*this = device.createCuFunctionNVX( createInfo, allocator );
}
# endif
CuFunctionNVX( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VkCuFunctionNVX function,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_device( device )
, m_function( function )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
{
}
CuFunctionNVX( std::nullptr_t ) {}
~CuFunctionNVX()
{
clear();
}
CuFunctionNVX() = delete;
CuFunctionNVX( CuFunctionNVX const & ) = delete;
CuFunctionNVX( CuFunctionNVX && rhs ) VULKAN_HPP_NOEXCEPT
: m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) )
, m_function( VULKAN_HPP_NAMESPACE::exchange( rhs.m_function, {} ) )
, m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
{
}
CuFunctionNVX & operator=( CuFunctionNVX const & ) = delete;
CuFunctionNVX & operator=( CuFunctionNVX && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
std::swap( m_device, rhs.m_device );
std::swap( m_function, rhs.m_function );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
return *this;
}
VULKAN_HPP_NAMESPACE::CuFunctionNVX const & operator*() const VULKAN_HPP_NOEXCEPT
{
return m_function;
}
operator VULKAN_HPP_NAMESPACE::CuFunctionNVX() const VULKAN_HPP_NOEXCEPT
{
return m_function;
}
void clear() VULKAN_HPP_NOEXCEPT
{
if ( m_function )
{
getDispatcher()->vkDestroyCuFunctionNVX(
static_cast<VkDevice>( m_device ), static_cast<VkCuFunctionNVX>( m_function ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
}
m_device = nullptr;
m_function = nullptr;
m_allocator = nullptr;
m_dispatcher = nullptr;
}
VULKAN_HPP_NAMESPACE::CuFunctionNVX release()
{
m_device = nullptr;
m_allocator = nullptr;
m_dispatcher = nullptr;
return VULKAN_HPP_NAMESPACE::exchange( m_function, nullptr );
}
VULKAN_HPP_NAMESPACE::Device getDevice() const
{
return m_device;
}
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const
{
VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
return m_dispatcher;
}
void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CuFunctionNVX & rhs ) VULKAN_HPP_NOEXCEPT
{
std::swap( m_device, rhs.m_device );
std::swap( m_function, rhs.m_function );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
private:
VULKAN_HPP_NAMESPACE::Device m_device = {};
VULKAN_HPP_NAMESPACE::CuFunctionNVX m_function = {};
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr;
};
class CuModuleNVX
{
public:
using CType = VkCuModuleNVX;
using CppType = VULKAN_HPP_NAMESPACE::CuModuleNVX;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eCuModuleNVX;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCuModuleNVX;
public:
# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
CuModuleNVX( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
{
*this = device.createCuModuleNVX( createInfo, allocator );
}
# endif
CuModuleNVX( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VkCuModuleNVX module,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_device( device )
, m_module( module )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
{
}
CuModuleNVX( std::nullptr_t ) {}
~CuModuleNVX()
{
clear();
}
CuModuleNVX() = delete;
CuModuleNVX( CuModuleNVX const & ) = delete;
CuModuleNVX( CuModuleNVX && rhs ) VULKAN_HPP_NOEXCEPT
: m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) )
, m_module( VULKAN_HPP_NAMESPACE::exchange( rhs.m_module, {} ) )
, m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
{
}
CuModuleNVX & operator=( CuModuleNVX const & ) = delete;
CuModuleNVX & operator=( CuModuleNVX && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
std::swap( m_device, rhs.m_device );
std::swap( m_module, rhs.m_module );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
return *this;
}
VULKAN_HPP_NAMESPACE::CuModuleNVX const & operator*() const VULKAN_HPP_NOEXCEPT
{
return m_module;
}
operator VULKAN_HPP_NAMESPACE::CuModuleNVX() const VULKAN_HPP_NOEXCEPT
{
return m_module;
}
void clear() VULKAN_HPP_NOEXCEPT
{
if ( m_module )
{
getDispatcher()->vkDestroyCuModuleNVX(
static_cast<VkDevice>( m_device ), static_cast<VkCuModuleNVX>( m_module ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
}
m_device = nullptr;
m_module = nullptr;
m_allocator = nullptr;
m_dispatcher = nullptr;
}
VULKAN_HPP_NAMESPACE::CuModuleNVX release()
{
m_device = nullptr;
m_allocator = nullptr;
m_dispatcher = nullptr;
return VULKAN_HPP_NAMESPACE::exchange( m_module, nullptr );
}
VULKAN_HPP_NAMESPACE::Device getDevice() const
{
return m_device;
}
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const
{
VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
return m_dispatcher;
}
void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CuModuleNVX & rhs ) VULKAN_HPP_NOEXCEPT
{
std::swap( m_device, rhs.m_device );
std::swap( m_module, rhs.m_module );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
private:
VULKAN_HPP_NAMESPACE::Device m_device = {};
VULKAN_HPP_NAMESPACE::CuModuleNVX m_module = {};
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr;
};
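    // Usage sketch (not generated): the VK_NVX_binary_import wrappers are typically used as a pair --
    // create a CuModuleNVX from a binary blob, then a CuFunctionNVX naming a kernel inside it.
    // `device`, `blobSize`, `blobData` and the kernel name are caller-supplied assumptions.
    //
    //   vk::CuModuleCreateInfoNVX moduleCreateInfo( blobSize, blobData );
    //   vk::raii::CuModuleNVX cuModule( device, moduleCreateInfo );
    //   vk::CuFunctionCreateInfoNVX functionCreateInfo( *cuModule, "myKernel" );
    //   vk::raii::CuFunctionNVX cuFunction( device, functionCreateInfo );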
# if defined( VK_ENABLE_BETA_EXTENSIONS )
class CudaFunctionNV
{
public:
using CType = VkCudaFunctionNV;
using CppType = VULKAN_HPP_NAMESPACE::CudaFunctionNV;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eCudaFunctionNV;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCudaFunctionNV;
public:
# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
CudaFunctionNV( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VULKAN_HPP_NAMESPACE::CudaFunctionCreateInfoNV const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
{
*this = device.createCudaFunctionNV( createInfo, allocator );
}
# endif
CudaFunctionNV( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VkCudaFunctionNV function,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_device( device )
, m_function( function )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
{
}
CudaFunctionNV( std::nullptr_t ) {}
~CudaFunctionNV()
{
clear();
}
CudaFunctionNV() = delete;
CudaFunctionNV( CudaFunctionNV const & ) = delete;
CudaFunctionNV( CudaFunctionNV && rhs ) VULKAN_HPP_NOEXCEPT
: m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) )
, m_function( VULKAN_HPP_NAMESPACE::exchange( rhs.m_function, {} ) )
, m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
{
}
CudaFunctionNV & operator=( CudaFunctionNV const & ) = delete;
CudaFunctionNV & operator=( CudaFunctionNV && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
std::swap( m_device, rhs.m_device );
std::swap( m_function, rhs.m_function );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
return *this;
}
VULKAN_HPP_NAMESPACE::CudaFunctionNV const & operator*() const VULKAN_HPP_NOEXCEPT
{
return m_function;
}
operator VULKAN_HPP_NAMESPACE::CudaFunctionNV() const VULKAN_HPP_NOEXCEPT
{
return m_function;
}
void clear() VULKAN_HPP_NOEXCEPT
{
if ( m_function )
{
getDispatcher()->vkDestroyCudaFunctionNV(
static_cast<VkDevice>( m_device ), static_cast<VkCudaFunctionNV>( m_function ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
}
m_device = nullptr;
m_function = nullptr;
m_allocator = nullptr;
m_dispatcher = nullptr;
}
VULKAN_HPP_NAMESPACE::CudaFunctionNV release()
{
m_device = nullptr;
m_allocator = nullptr;
m_dispatcher = nullptr;
return VULKAN_HPP_NAMESPACE::exchange( m_function, nullptr );
}
VULKAN_HPP_NAMESPACE::Device getDevice() const
{
return m_device;
}
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const
{
VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
return m_dispatcher;
}
void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CudaFunctionNV & rhs ) VULKAN_HPP_NOEXCEPT
{
std::swap( m_device, rhs.m_device );
std::swap( m_function, rhs.m_function );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
private:
VULKAN_HPP_NAMESPACE::Device m_device = {};
VULKAN_HPP_NAMESPACE::CudaFunctionNV m_function = {};
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr;
};
# endif /*VK_ENABLE_BETA_EXTENSIONS*/
# if defined( VK_ENABLE_BETA_EXTENSIONS )
class CudaModuleNV
{
public:
using CType = VkCudaModuleNV;
using CppType = VULKAN_HPP_NAMESPACE::CudaModuleNV;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eCudaModuleNV;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCudaModuleNV;
public:
# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
CudaModuleNV( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VULKAN_HPP_NAMESPACE::CudaModuleCreateInfoNV const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
{
*this = device.createCudaModuleNV( createInfo, allocator );
}
# endif
CudaModuleNV( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VkCudaModuleNV module,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_device( device )
, m_module( module )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
{
}
CudaModuleNV( std::nullptr_t ) {}
~CudaModuleNV()
{
clear();
}
CudaModuleNV() = delete;
CudaModuleNV( CudaModuleNV const & ) = delete;
CudaModuleNV( CudaModuleNV && rhs ) VULKAN_HPP_NOEXCEPT
: m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) )
, m_module( VULKAN_HPP_NAMESPACE::exchange( rhs.m_module, {} ) )
, m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
{
}
CudaModuleNV & operator=( CudaModuleNV const & ) = delete;
CudaModuleNV & operator=( CudaModuleNV && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
std::swap( m_device, rhs.m_device );
std::swap( m_module, rhs.m_module );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
return *this;
}
VULKAN_HPP_NAMESPACE::CudaModuleNV const & operator*() const VULKAN_HPP_NOEXCEPT
{
return m_module;
}
operator VULKAN_HPP_NAMESPACE::CudaModuleNV() const VULKAN_HPP_NOEXCEPT
{
return m_module;
}
void clear() VULKAN_HPP_NOEXCEPT
{
if ( m_module )
{
getDispatcher()->vkDestroyCudaModuleNV(
static_cast<VkDevice>( m_device ), static_cast<VkCudaModuleNV>( m_module ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
}
m_device = nullptr;
m_module = nullptr;
m_allocator = nullptr;
m_dispatcher = nullptr;
}
VULKAN_HPP_NAMESPACE::CudaModuleNV release()
{
m_device = nullptr;
m_allocator = nullptr;
m_dispatcher = nullptr;
return VULKAN_HPP_NAMESPACE::exchange( m_module, nullptr );
}
VULKAN_HPP_NAMESPACE::Device getDevice() const
{
return m_device;
}
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const
{
VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
return m_dispatcher;
}
void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CudaModuleNV & rhs ) VULKAN_HPP_NOEXCEPT
{
std::swap( m_device, rhs.m_device );
std::swap( m_module, rhs.m_module );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
//=== VK_NV_cuda_kernel_launch ===
VULKAN_HPP_NODISCARD std::vector<uint8_t> getCache() const;
private:
VULKAN_HPP_NAMESPACE::Device m_device = {};
VULKAN_HPP_NAMESPACE::CudaModuleNV m_module = {};
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr;
};
# endif /*VK_ENABLE_BETA_EXTENSIONS*/
class DebugReportCallbackEXT
{
public:
using CType = VkDebugReportCallbackEXT;
using CppType = VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDebugReportCallbackEXT;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDebugReportCallbackEXT;
public:
# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
DebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance,
VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
{
*this = instance.createDebugReportCallbackEXT( createInfo, allocator );
}
# endif
DebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance,
VkDebugReportCallbackEXT callback,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_instance( instance )
, m_callback( callback )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( instance.getDispatcher() )
{
}
DebugReportCallbackEXT( std::nullptr_t ) {}
~DebugReportCallbackEXT()
{
clear();
}
DebugReportCallbackEXT() = delete;
DebugReportCallbackEXT( DebugReportCallbackEXT const & ) = delete;
DebugReportCallbackEXT( DebugReportCallbackEXT && rhs ) VULKAN_HPP_NOEXCEPT
: m_instance( VULKAN_HPP_NAMESPACE::exchange( rhs.m_instance, {} ) )
, m_callback( VULKAN_HPP_NAMESPACE::exchange( rhs.m_callback, {} ) )
, m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
{
}
DebugReportCallbackEXT & operator=( DebugReportCallbackEXT const & ) = delete;
DebugReportCallbackEXT & operator=( DebugReportCallbackEXT && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
std::swap( m_instance, rhs.m_instance );
std::swap( m_callback, rhs.m_callback );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
return *this;
}
VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT const & operator*() const VULKAN_HPP_NOEXCEPT
{
return m_callback;
}
operator VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT() const VULKAN_HPP_NOEXCEPT
{
return m_callback;
}
void clear() VULKAN_HPP_NOEXCEPT
{
if ( m_callback )
{
getDispatcher()->vkDestroyDebugReportCallbackEXT( static_cast<VkInstance>( m_instance ),
static_cast<VkDebugReportCallbackEXT>( m_callback ),
reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
}
m_instance = nullptr;
m_callback = nullptr;
m_allocator = nullptr;
m_dispatcher = nullptr;
}
VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT release()
{
m_instance = nullptr;
m_allocator = nullptr;
m_dispatcher = nullptr;
return VULKAN_HPP_NAMESPACE::exchange( m_callback, nullptr );
}
VULKAN_HPP_NAMESPACE::Instance getInstance() const
{
return m_instance;
}
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::InstanceDispatcher const * getDispatcher() const
{
VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
return m_dispatcher;
}
void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DebugReportCallbackEXT & rhs ) VULKAN_HPP_NOEXCEPT
{
std::swap( m_instance, rhs.m_instance );
std::swap( m_callback, rhs.m_callback );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
private:
VULKAN_HPP_NAMESPACE::Instance m_instance = {};
VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT m_callback = {};
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::InstanceDispatcher const * m_dispatcher = nullptr;
};
class DebugUtilsMessengerEXT
{
public:
using CType = VkDebugUtilsMessengerEXT;
using CppType = VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDebugUtilsMessengerEXT;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
public:
# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
DebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance,
VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
{
*this = instance.createDebugUtilsMessengerEXT( createInfo, allocator );
}
# endif
DebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance,
VkDebugUtilsMessengerEXT messenger,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_instance( instance )
, m_messenger( messenger )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( instance.getDispatcher() )
{
}
DebugUtilsMessengerEXT( std::nullptr_t ) {}
~DebugUtilsMessengerEXT()
{
clear();
}
DebugUtilsMessengerEXT() = delete;
DebugUtilsMessengerEXT( DebugUtilsMessengerEXT const & ) = delete;
DebugUtilsMessengerEXT( DebugUtilsMessengerEXT && rhs ) VULKAN_HPP_NOEXCEPT
: m_instance( VULKAN_HPP_NAMESPACE::exchange( rhs.m_instance, {} ) )
, m_messenger( VULKAN_HPP_NAMESPACE::exchange( rhs.m_messenger, {} ) )
, m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
{
}
DebugUtilsMessengerEXT & operator=( DebugUtilsMessengerEXT const & ) = delete;
DebugUtilsMessengerEXT & operator=( DebugUtilsMessengerEXT && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
std::swap( m_instance, rhs.m_instance );
std::swap( m_messenger, rhs.m_messenger );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
return *this;
}
VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT const & operator*() const VULKAN_HPP_NOEXCEPT
{
return m_messenger;
}
operator VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT() const VULKAN_HPP_NOEXCEPT
{
return m_messenger;
}
void clear() VULKAN_HPP_NOEXCEPT
{
if ( m_messenger )
{
getDispatcher()->vkDestroyDebugUtilsMessengerEXT( static_cast<VkInstance>( m_instance ),
static_cast<VkDebugUtilsMessengerEXT>( m_messenger ),
reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
}
m_instance = nullptr;
m_messenger = nullptr;
m_allocator = nullptr;
m_dispatcher = nullptr;
}
VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT release()
{
m_instance = nullptr;
m_allocator = nullptr;
m_dispatcher = nullptr;
return VULKAN_HPP_NAMESPACE::exchange( m_messenger, nullptr );
}
VULKAN_HPP_NAMESPACE::Instance getInstance() const
{
return m_instance;
}
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::InstanceDispatcher const * getDispatcher() const
{
VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
return m_dispatcher;
}
void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DebugUtilsMessengerEXT & rhs ) VULKAN_HPP_NOEXCEPT
{
std::swap( m_instance, rhs.m_instance );
std::swap( m_messenger, rhs.m_messenger );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
private:
VULKAN_HPP_NAMESPACE::Instance m_instance = {};
VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT m_messenger = {};
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::InstanceDispatcher const * m_dispatcher = nullptr;
};
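    // Usage sketch (not generated): creating a RAII debug messenger. `instance` is an assumed
    // vk::raii::Instance created with VK_EXT_debug_utils, and `debugCallback` an assumed function with
    // the PFN_vkDebugUtilsMessengerCallbackEXT signature; the messenger is destroyed automatically
    // when it goes out of scope.
    //
    //   vk::DebugUtilsMessengerCreateInfoEXT createInfo(
    //     {},
    //     vk::DebugUtilsMessageSeverityFlagBitsEXT::eWarning | vk::DebugUtilsMessageSeverityFlagBitsEXT::eError,
    //     vk::DebugUtilsMessageTypeFlagBitsEXT::eGeneral | vk::DebugUtilsMessageTypeFlagBitsEXT::eValidation,
    //     &debugCallback );
    //   vk::raii::DebugUtilsMessengerEXT messenger( instance, createInfo );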
class DeferredOperationKHR
{
public:
using CType = VkDeferredOperationKHR;
using CppType = VULKAN_HPP_NAMESPACE::DeferredOperationKHR;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDeferredOperationKHR;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
public:
# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
DeferredOperationKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
{
*this = device.createDeferredOperationKHR( allocator );
}
# endif
DeferredOperationKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VkDeferredOperationKHR operation,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_device( device )
, m_operation( operation )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
{
}
DeferredOperationKHR( std::nullptr_t ) {}
~DeferredOperationKHR()
{
clear();
}
DeferredOperationKHR() = delete;
DeferredOperationKHR( DeferredOperationKHR const & ) = delete;
DeferredOperationKHR( DeferredOperationKHR && rhs ) VULKAN_HPP_NOEXCEPT
: m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) )
, m_operation( VULKAN_HPP_NAMESPACE::exchange( rhs.m_operation, {} ) )
, m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
{
}
DeferredOperationKHR & operator=( DeferredOperationKHR const & ) = delete;
DeferredOperationKHR & operator=( DeferredOperationKHR && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
std::swap( m_device, rhs.m_device );
std::swap( m_operation, rhs.m_operation );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
return *this;
}
VULKAN_HPP_NAMESPACE::DeferredOperationKHR const & operator*() const VULKAN_HPP_NOEXCEPT
{
return m_operation;
}
operator VULKAN_HPP_NAMESPACE::DeferredOperationKHR() const VULKAN_HPP_NOEXCEPT
{
return m_operation;
}
void clear() VULKAN_HPP_NOEXCEPT
{
if ( m_operation )
{
getDispatcher()->vkDestroyDeferredOperationKHR( static_cast<VkDevice>( m_device ),
static_cast<VkDeferredOperationKHR>( m_operation ),
reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
}
m_device = nullptr;
m_operation = nullptr;
m_allocator = nullptr;
m_dispatcher = nullptr;
}
VULKAN_HPP_NAMESPACE::DeferredOperationKHR release()
{
m_device = nullptr;
m_allocator = nullptr;
m_dispatcher = nullptr;
return VULKAN_HPP_NAMESPACE::exchange( m_operation, nullptr );
}
VULKAN_HPP_NAMESPACE::Device getDevice() const
{
return m_device;
}
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const
{
VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
return m_dispatcher;
}
void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeferredOperationKHR & rhs ) VULKAN_HPP_NOEXCEPT
{
std::swap( m_device, rhs.m_device );
std::swap( m_operation, rhs.m_operation );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
//=== VK_KHR_deferred_host_operations ===
VULKAN_HPP_NODISCARD uint32_t getMaxConcurrency() const VULKAN_HPP_NOEXCEPT;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result getResult() const VULKAN_HPP_NOEXCEPT;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result join() const;
private:
VULKAN_HPP_NAMESPACE::Device m_device = {};
VULKAN_HPP_NAMESPACE::DeferredOperationKHR m_operation = {};
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr;
};
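    // Usage sketch (not generated): joining a deferred host operation from the calling thread.
    // `device` is an assumed vk::raii::Device with VK_KHR_deferred_host_operations enabled; the
    // operation itself would be handed to a deferrable command such as a ray-tracing pipeline build.
    //
    //   vk::raii::DeferredOperationKHR operation( device );
    //   uint32_t   maxThreads = operation.getMaxConcurrency();
    //   vk::Result joinResult = operation.join();      // eSuccess, eThreadDoneKHR or eThreadIdleKHR
    //   vk::Result opResult   = operation.getResult();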
class DescriptorPool
{
public:
using CType = VkDescriptorPool;
using CppType = VULKAN_HPP_NAMESPACE::DescriptorPool;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorPool;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorPool;
public:
# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
DescriptorPool( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
{
*this = device.createDescriptorPool( createInfo, allocator );
}
# endif
DescriptorPool( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VkDescriptorPool descriptorPool,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_device( device )
, m_descriptorPool( descriptorPool )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
{
}
DescriptorPool( std::nullptr_t ) {}
~DescriptorPool()
{
clear();
}
DescriptorPool() = delete;
DescriptorPool( DescriptorPool const & ) = delete;
DescriptorPool( DescriptorPool && rhs ) VULKAN_HPP_NOEXCEPT
: m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) )
, m_descriptorPool( VULKAN_HPP_NAMESPACE::exchange( rhs.m_descriptorPool, {} ) )
, m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
{
}
DescriptorPool & operator=( DescriptorPool const & ) = delete;
DescriptorPool & operator=( DescriptorPool && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
std::swap( m_device, rhs.m_device );
std::swap( m_descriptorPool, rhs.m_descriptorPool );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
return *this;
}
VULKAN_HPP_NAMESPACE::DescriptorPool const & operator*() const VULKAN_HPP_NOEXCEPT
{
return m_descriptorPool;
}
operator VULKAN_HPP_NAMESPACE::DescriptorPool() const VULKAN_HPP_NOEXCEPT
{
return m_descriptorPool;
}
void clear() VULKAN_HPP_NOEXCEPT
{
if ( m_descriptorPool )
{
getDispatcher()->vkDestroyDescriptorPool( static_cast<VkDevice>( m_device ),
static_cast<VkDescriptorPool>( m_descriptorPool ),
reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
}
m_device = nullptr;
m_descriptorPool = nullptr;
m_allocator = nullptr;
m_dispatcher = nullptr;
}
VULKAN_HPP_NAMESPACE::DescriptorPool release()
{
m_device = nullptr;
m_allocator = nullptr;
m_dispatcher = nullptr;
return VULKAN_HPP_NAMESPACE::exchange( m_descriptorPool, nullptr );
}
VULKAN_HPP_NAMESPACE::Device getDevice() const
{
return m_device;
}
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const
{
VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
return m_dispatcher;
}
void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DescriptorPool & rhs ) VULKAN_HPP_NOEXCEPT
{
std::swap( m_device, rhs.m_device );
std::swap( m_descriptorPool, rhs.m_descriptorPool );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
//=== VK_VERSION_1_0 ===
void reset( VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
private:
VULKAN_HPP_NAMESPACE::Device m_device = {};
VULKAN_HPP_NAMESPACE::DescriptorPool m_descriptorPool = {};
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr;
};
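    // Usage sketch (not generated): creating and resetting a RAII descriptor pool. `device` is an
    // assumed vk::raii::Device; the pool size and set count are placeholder values.
    //
    //   vk::DescriptorPoolSize poolSize( vk::DescriptorType::eUniformBuffer, 16 );
    //   vk::DescriptorPoolCreateInfo createInfo( vk::DescriptorPoolCreateFlagBits::eFreeDescriptorSet, 16, poolSize );
    //   vk::raii::DescriptorPool descriptorPool( device, createInfo );
    //   descriptorPool.reset();   // returns all sets allocated from the pool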
class DescriptorSet
{
public:
using CType = VkDescriptorSet;
using CppType = VULKAN_HPP_NAMESPACE::DescriptorSet;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorSet;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorSet;
public:
DescriptorSet( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VkDescriptorSet descriptorSet, VkDescriptorPool descriptorPool )
: m_device( device ), m_descriptorPool( descriptorPool ), m_descriptorSet( descriptorSet ), m_dispatcher( device.getDispatcher() )
{
}
DescriptorSet( std::nullptr_t ) {}
~DescriptorSet()
{
clear();
}
DescriptorSet() = delete;
DescriptorSet( DescriptorSet const & ) = delete;
DescriptorSet( DescriptorSet && rhs ) VULKAN_HPP_NOEXCEPT
: m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) )
, m_descriptorPool( VULKAN_HPP_NAMESPACE::exchange( rhs.m_descriptorPool, {} ) )
, m_descriptorSet( VULKAN_HPP_NAMESPACE::exchange( rhs.m_descriptorSet, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
{
}
DescriptorSet & operator=( DescriptorSet const & ) = delete;
DescriptorSet & operator=( DescriptorSet && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
std::swap( m_device, rhs.m_device );
std::swap( m_descriptorPool, rhs.m_descriptorPool );
std::swap( m_descriptorSet, rhs.m_descriptorSet );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
return *this;
}
VULKAN_HPP_NAMESPACE::DescriptorSet const & operator*() const VULKAN_HPP_NOEXCEPT
{
return m_descriptorSet;
}
operator VULKAN_HPP_NAMESPACE::DescriptorSet() const VULKAN_HPP_NOEXCEPT
{
return m_descriptorSet;
}
void clear() VULKAN_HPP_NOEXCEPT
{
if ( m_descriptorSet )
{
getDispatcher()->vkFreeDescriptorSets( static_cast<VkDevice>( m_device ),
static_cast<VkDescriptorPool>( m_descriptorPool ),
1,
reinterpret_cast<VkDescriptorSet const *>( &m_descriptorSet ) );
}
m_device = nullptr;
m_descriptorPool = nullptr;
m_descriptorSet = nullptr;
m_dispatcher = nullptr;
}
VULKAN_HPP_NAMESPACE::DescriptorSet release()
{
m_device = nullptr;
m_descriptorPool = nullptr;
m_dispatcher = nullptr;
return VULKAN_HPP_NAMESPACE::exchange( m_descriptorSet, nullptr );
}
VULKAN_HPP_NAMESPACE::Device getDevice() const
{
return m_device;
}
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const
{
VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
return m_dispatcher;
}
void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DescriptorSet & rhs ) VULKAN_HPP_NOEXCEPT
{
std::swap( m_device, rhs.m_device );
std::swap( m_descriptorPool, rhs.m_descriptorPool );
std::swap( m_descriptorSet, rhs.m_descriptorSet );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
//=== VK_VERSION_1_1 ===
template <typename DataType>
void updateWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, DataType const & data ) const VULKAN_HPP_NOEXCEPT;
//=== VK_KHR_descriptor_update_template ===
template <typename DataType>
void updateWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, DataType const & data ) const VULKAN_HPP_NOEXCEPT;
//=== VK_VALVE_descriptor_set_host_mapping ===
VULKAN_HPP_NODISCARD void * getHostMappingVALVE() const VULKAN_HPP_NOEXCEPT;
private:
VULKAN_HPP_NAMESPACE::Device m_device = {};
VULKAN_HPP_NAMESPACE::DescriptorPool m_descriptorPool = {};
VULKAN_HPP_NAMESPACE::DescriptorSet m_descriptorSet = {};
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr;
};
class DescriptorSets : public std::vector<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DescriptorSet>
{
public:
# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
DescriptorSets( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo const & allocateInfo )
{
*this = device.allocateDescriptorSets( allocateInfo );
}
# endif
DescriptorSets( std::nullptr_t ) {}
DescriptorSets() = delete;
DescriptorSets( DescriptorSets const & ) = delete;
DescriptorSets( DescriptorSets && rhs ) = default;
DescriptorSets & operator=( DescriptorSets const & ) = delete;
DescriptorSets & operator=( DescriptorSets && rhs ) = default;
private:
DescriptorSets( std::vector<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DescriptorSet> && rhs )
{
std::swap( *this, rhs );
}
};
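    // Usage sketch (not generated): allocating RAII descriptor sets. `device`, `descriptorPool` and
    // `descriptorSetLayout` are assumed vk::raii objects; the pool is assumed to have been created
    // with the eFreeDescriptorSet flag, since each vk::raii::DescriptorSet frees itself on destruction.
    //
    //   vk::DescriptorSetLayout layouts[] = { *descriptorSetLayout };
    //   vk::DescriptorSetAllocateInfo allocateInfo( *descriptorPool, layouts );
    //   vk::raii::DescriptorSets descriptorSets( device, allocateInfo );
    //   vk::raii::DescriptorSet & descriptorSet = descriptorSets.front();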
class DescriptorSetLayout
{
public:
using CType = VkDescriptorSetLayout;
using CppType = VULKAN_HPP_NAMESPACE::DescriptorSetLayout;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorSetLayout;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorSetLayout;
public:
# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
DescriptorSetLayout( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
{
*this = device.createDescriptorSetLayout( createInfo, allocator );
}
# endif
DescriptorSetLayout( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VkDescriptorSetLayout descriptorSetLayout,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_device( device )
, m_descriptorSetLayout( descriptorSetLayout )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
{
}
DescriptorSetLayout( std::nullptr_t ) {}
~DescriptorSetLayout()
{
clear();
}
DescriptorSetLayout() = delete;
DescriptorSetLayout( DescriptorSetLayout const & ) = delete;
DescriptorSetLayout( DescriptorSetLayout && rhs ) VULKAN_HPP_NOEXCEPT
: m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) )
, m_descriptorSetLayout( VULKAN_HPP_NAMESPACE::exchange( rhs.m_descriptorSetLayout, {} ) )
, m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
{
}
DescriptorSetLayout & operator=( DescriptorSetLayout const & ) = delete;
DescriptorSetLayout & operator=( DescriptorSetLayout && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
std::swap( m_device, rhs.m_device );
std::swap( m_descriptorSetLayout, rhs.m_descriptorSetLayout );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
return *this;
}
VULKAN_HPP_NAMESPACE::DescriptorSetLayout const & operator*() const VULKAN_HPP_NOEXCEPT
{
return m_descriptorSetLayout;
}
operator VULKAN_HPP_NAMESPACE::DescriptorSetLayout() const VULKAN_HPP_NOEXCEPT
{
return m_descriptorSetLayout;
}
void clear() VULKAN_HPP_NOEXCEPT
{
if ( m_descriptorSetLayout )
{
getDispatcher()->vkDestroyDescriptorSetLayout( static_cast<VkDevice>( m_device ),
static_cast<VkDescriptorSetLayout>( m_descriptorSetLayout ),
reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
}
m_device = nullptr;
m_descriptorSetLayout = nullptr;
m_allocator = nullptr;
m_dispatcher = nullptr;
}
VULKAN_HPP_NAMESPACE::DescriptorSetLayout release()
{
m_device = nullptr;
m_allocator = nullptr;
m_dispatcher = nullptr;
return VULKAN_HPP_NAMESPACE::exchange( m_descriptorSetLayout, nullptr );
}
VULKAN_HPP_NAMESPACE::Device getDevice() const
{
return m_device;
}
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const
{
VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
return m_dispatcher;
}
void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DescriptorSetLayout & rhs ) VULKAN_HPP_NOEXCEPT
{
std::swap( m_device, rhs.m_device );
std::swap( m_descriptorSetLayout, rhs.m_descriptorSetLayout );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
//=== VK_EXT_descriptor_buffer ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceSize getSizeEXT() const VULKAN_HPP_NOEXCEPT;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceSize getBindingOffsetEXT( uint32_t binding ) const VULKAN_HPP_NOEXCEPT;
private:
VULKAN_HPP_NAMESPACE::Device m_device = {};
VULKAN_HPP_NAMESPACE::DescriptorSetLayout m_descriptorSetLayout = {};
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr;
};
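    // Usage sketch (not generated): querying descriptor-buffer sizes for a RAII set layout. `device`
    // and `layoutCreateInfo` are assumptions; both queries require VK_EXT_descriptor_buffer.
    //
    //   vk::raii::DescriptorSetLayout descriptorSetLayout( device, layoutCreateInfo );
    //   vk::DeviceSize layoutSize    = descriptorSetLayout.getSizeEXT();
    //   vk::DeviceSize bindingOffset = descriptorSetLayout.getBindingOffsetEXT( 0 );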
class DescriptorUpdateTemplate
{
public:
using CType = VkDescriptorUpdateTemplate;
using CppType = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorUpdateTemplate;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorUpdateTemplate;
public:
# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
DescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
{
*this = device.createDescriptorUpdateTemplate( createInfo, allocator );
}
# endif
DescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VkDescriptorUpdateTemplate descriptorUpdateTemplate,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_device( device )
, m_descriptorUpdateTemplate( descriptorUpdateTemplate )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
{
}
DescriptorUpdateTemplate( std::nullptr_t ) {}
~DescriptorUpdateTemplate()
{
clear();
}
DescriptorUpdateTemplate() = delete;
DescriptorUpdateTemplate( DescriptorUpdateTemplate const & ) = delete;
DescriptorUpdateTemplate( DescriptorUpdateTemplate && rhs ) VULKAN_HPP_NOEXCEPT
: m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) )
, m_descriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::exchange( rhs.m_descriptorUpdateTemplate, {} ) )
, m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
{
}
DescriptorUpdateTemplate & operator=( DescriptorUpdateTemplate const & ) = delete;
DescriptorUpdateTemplate & operator=( DescriptorUpdateTemplate && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
std::swap( m_device, rhs.m_device );
std::swap( m_descriptorUpdateTemplate, rhs.m_descriptorUpdateTemplate );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
return *this;
}
VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate const & operator*() const VULKAN_HPP_NOEXCEPT
{
return m_descriptorUpdateTemplate;
}
operator VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate() const VULKAN_HPP_NOEXCEPT
{
return m_descriptorUpdateTemplate;
}
void clear() VULKAN_HPP_NOEXCEPT
{
if ( m_descriptorUpdateTemplate )
{
getDispatcher()->vkDestroyDescriptorUpdateTemplate( static_cast<VkDevice>( m_device ),
static_cast<VkDescriptorUpdateTemplate>( m_descriptorUpdateTemplate ),
reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
}
m_device = nullptr;
m_descriptorUpdateTemplate = nullptr;
m_allocator = nullptr;
m_dispatcher = nullptr;
}
VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate release()
{
m_device = nullptr;
m_allocator = nullptr;
m_dispatcher = nullptr;
return VULKAN_HPP_NAMESPACE::exchange( m_descriptorUpdateTemplate, nullptr );
}
VULKAN_HPP_NAMESPACE::Device getDevice() const
{
return m_device;
}
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const
{
VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
return m_dispatcher;
}
void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DescriptorUpdateTemplate & rhs ) VULKAN_HPP_NOEXCEPT
{
std::swap( m_device, rhs.m_device );
std::swap( m_descriptorUpdateTemplate, rhs.m_descriptorUpdateTemplate );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
private:
VULKAN_HPP_NAMESPACE::Device m_device = {};
VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate m_descriptorUpdateTemplate = {};
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr;
};
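    // Illustrative usage sketch (not part of the generated API; `device` and `templateCreateInfo` are assumed):
    //   vk::raii::DescriptorUpdateTemplate updateTemplate( device, templateCreateInfo );
    //   // equivalent to: auto updateTemplate = device.createDescriptorUpdateTemplate( templateCreateInfo );
    //   // vkDestroyDescriptorUpdateTemplate is called automatically when `updateTemplate` goes out of scope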
class DeviceMemory
{
public:
using CType = VkDeviceMemory;
using CppType = VULKAN_HPP_NAMESPACE::DeviceMemory;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDeviceMemory;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDeviceMemory;
public:
# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
DeviceMemory( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VULKAN_HPP_NAMESPACE::MemoryAllocateInfo const & allocateInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
{
*this = device.allocateMemory( allocateInfo, allocator );
}
# endif
DeviceMemory( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VkDeviceMemory memory,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_device( device )
, m_memory( memory )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
{
}
DeviceMemory( std::nullptr_t ) {}
~DeviceMemory()
{
clear();
}
DeviceMemory() = delete;
DeviceMemory( DeviceMemory const & ) = delete;
DeviceMemory( DeviceMemory && rhs ) VULKAN_HPP_NOEXCEPT
: m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) )
, m_memory( VULKAN_HPP_NAMESPACE::exchange( rhs.m_memory, {} ) )
, m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
{
}
DeviceMemory & operator=( DeviceMemory const & ) = delete;
DeviceMemory & operator=( DeviceMemory && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
std::swap( m_device, rhs.m_device );
std::swap( m_memory, rhs.m_memory );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
return *this;
}
VULKAN_HPP_NAMESPACE::DeviceMemory const & operator*() const VULKAN_HPP_NOEXCEPT
{
return m_memory;
}
operator VULKAN_HPP_NAMESPACE::DeviceMemory() const VULKAN_HPP_NOEXCEPT
{
return m_memory;
}
void clear() VULKAN_HPP_NOEXCEPT
{
if ( m_memory )
{
getDispatcher()->vkFreeMemory(
static_cast<VkDevice>( m_device ), static_cast<VkDeviceMemory>( m_memory ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
}
m_device = nullptr;
m_memory = nullptr;
m_allocator = nullptr;
m_dispatcher = nullptr;
}
VULKAN_HPP_NAMESPACE::DeviceMemory release()
{
m_device = nullptr;
m_allocator = nullptr;
m_dispatcher = nullptr;
return VULKAN_HPP_NAMESPACE::exchange( m_memory, nullptr );
}
VULKAN_HPP_NAMESPACE::Device getDevice() const
{
return m_device;
}
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const
{
VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
return m_dispatcher;
}
void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceMemory & rhs ) VULKAN_HPP_NOEXCEPT
{
std::swap( m_device, rhs.m_device );
std::swap( m_memory, rhs.m_memory );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
//=== VK_VERSION_1_0 ===
VULKAN_HPP_NODISCARD void * mapMemory( VULKAN_HPP_NAMESPACE::DeviceSize offset,
VULKAN_HPP_NAMESPACE::DeviceSize size,
VULKAN_HPP_NAMESPACE::MemoryMapFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const;
void unmapMemory() const VULKAN_HPP_NOEXCEPT;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceSize getCommitment() const VULKAN_HPP_NOEXCEPT;
# if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_NV_external_memory_win32 ===
VULKAN_HPP_NODISCARD HANDLE getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType ) const;
# endif /*VK_USE_PLATFORM_WIN32_KHR*/
//=== VK_EXT_pageable_device_local_memory ===
void setPriorityEXT( float priority ) const VULKAN_HPP_NOEXCEPT;
private:
VULKAN_HPP_NAMESPACE::Device m_device = {};
VULKAN_HPP_NAMESPACE::DeviceMemory m_memory = {};
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr;
};
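    // Illustrative usage sketch (not part of the generated API; `device`, `allocationSize`, `memoryTypeIndex`
    // and a host-visible source buffer `source` are assumed):
    //   vk::raii::DeviceMemory memory( device, vk::MemoryAllocateInfo{ allocationSize, memoryTypeIndex } );
    //   void * data = memory.mapMemory( 0, allocationSize );                  // map the whole allocation
    //   std::memcpy( data, source, static_cast<size_t>( allocationSize ) );   // <cstring>
    //   memory.unmapMemory();
    //   // vkFreeMemory is called automatically when `memory` goes out of scope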
class DisplayKHR
{
public:
using CType = VkDisplayKHR;
using CppType = VULKAN_HPP_NAMESPACE::DisplayKHR;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDisplayKHR;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDisplayKHR;
public:
# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
DisplayKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice const & physicalDevice, int32_t drmFd, uint32_t connectorId )
{
*this = physicalDevice.getDrmDisplayEXT( drmFd, connectorId );
}
# endif
# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
# if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT )
DisplayKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice const & physicalDevice, Display & dpy, RROutput rrOutput )
{
*this = physicalDevice.getRandROutputDisplayEXT( dpy, rrOutput );
}
# endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
# endif
# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
# if defined( VK_USE_PLATFORM_WIN32_KHR )
DisplayKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice const & physicalDevice, uint32_t deviceRelativeId )
{
*this = physicalDevice.getWinrtDisplayNV( deviceRelativeId );
}
# endif /*VK_USE_PLATFORM_WIN32_KHR*/
# endif
DisplayKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice const & physicalDevice, VkDisplayKHR display )
: m_physicalDevice( physicalDevice ), m_display( display ), m_dispatcher( physicalDevice.getDispatcher() )
{
}
DisplayKHR( std::nullptr_t ) {}
~DisplayKHR()
{
clear();
}
DisplayKHR() = delete;
DisplayKHR( DisplayKHR const & ) = delete;
DisplayKHR( DisplayKHR && rhs ) VULKAN_HPP_NOEXCEPT
: m_physicalDevice( VULKAN_HPP_NAMESPACE::exchange( rhs.m_physicalDevice, {} ) )
, m_display( VULKAN_HPP_NAMESPACE::exchange( rhs.m_display, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
{
}
DisplayKHR & operator=( DisplayKHR const & ) = delete;
DisplayKHR & operator=( DisplayKHR && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
std::swap( m_physicalDevice, rhs.m_physicalDevice );
std::swap( m_display, rhs.m_display );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
return *this;
}
VULKAN_HPP_NAMESPACE::DisplayKHR const & operator*() const VULKAN_HPP_NOEXCEPT
{
return m_display;
}
operator VULKAN_HPP_NAMESPACE::DisplayKHR() const VULKAN_HPP_NOEXCEPT
{
return m_display;
}
void clear() VULKAN_HPP_NOEXCEPT
{
if ( m_display )
{
getDispatcher()->vkReleaseDisplayEXT( static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkDisplayKHR>( m_display ) );
}
m_physicalDevice = nullptr;
m_display = nullptr;
m_dispatcher = nullptr;
}
VULKAN_HPP_NAMESPACE::DisplayKHR release()
{
m_physicalDevice = nullptr;
m_dispatcher = nullptr;
return VULKAN_HPP_NAMESPACE::exchange( m_display, nullptr );
}
VULKAN_HPP_NAMESPACE::PhysicalDevice getPhysicalDevice() const
{
return m_physicalDevice;
}
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::InstanceDispatcher const * getDispatcher() const
{
VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
return m_dispatcher;
}
void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayKHR & rhs ) VULKAN_HPP_NOEXCEPT
{
std::swap( m_physicalDevice, rhs.m_physicalDevice );
std::swap( m_display, rhs.m_display );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
//=== VK_KHR_display ===
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR> getModeProperties() const;
VULKAN_HPP_NODISCARD
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayModeKHR>::Type
createMode( VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT;
//=== VK_KHR_get_display_properties2 ===
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR> getModeProperties2() const;
# if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_NV_acquire_winrt_display ===
void acquireWinrtNV() const;
# endif /*VK_USE_PLATFORM_WIN32_KHR*/
private:
VULKAN_HPP_NAMESPACE::PhysicalDevice m_physicalDevice = {};
VULKAN_HPP_NAMESPACE::DisplayKHR m_display = {};
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::InstanceDispatcher const * m_dispatcher = nullptr;
};
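    // Illustrative usage sketch (not part of the generated API; `physicalDevice`, `drmFd` and `connectorId`
    // are assumed, with VK_EXT_acquire_drm_display enabled):
    //   vk::raii::DisplayKHR display( physicalDevice, drmFd, connectorId );
    //   std::vector<vk::DisplayModePropertiesKHR> modeProperties = display.getModeProperties();  // VK_KHR_display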
class DisplayKHRs : public std::vector<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayKHR>
{
public:
# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
DisplayKHRs( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice const & physicalDevice, uint32_t planeIndex )
{
*this = physicalDevice.getDisplayPlaneSupportedDisplaysKHR( planeIndex );
}
# endif
DisplayKHRs( std::nullptr_t ) {}
DisplayKHRs() = delete;
DisplayKHRs( DisplayKHRs const & ) = delete;
DisplayKHRs( DisplayKHRs && rhs ) = default;
DisplayKHRs & operator=( DisplayKHRs const & ) = delete;
DisplayKHRs & operator=( DisplayKHRs && rhs ) = default;
private:
DisplayKHRs( std::vector<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayKHR> && rhs )
{
std::swap( *this, rhs );
}
};
class DisplayModeKHR
{
public:
using CType = VkDisplayModeKHR;
using CppType = VULKAN_HPP_NAMESPACE::DisplayModeKHR;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDisplayModeKHR;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDisplayModeKHR;
public:
# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
DisplayModeKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayKHR const & display,
VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
{
*this = display.createMode( createInfo, allocator );
}
# endif
DisplayModeKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayKHR const & display, VkDisplayModeKHR displayModeKHR )
: m_physicalDevice( display.getPhysicalDevice() ), m_displayModeKHR( displayModeKHR ), m_dispatcher( display.getDispatcher() )
{
}
DisplayModeKHR( std::nullptr_t ) {}
~DisplayModeKHR()
{
clear();
}
DisplayModeKHR() = delete;
DisplayModeKHR( DisplayModeKHR const & rhs ) : m_displayModeKHR( rhs.m_displayModeKHR ), m_dispatcher( rhs.m_dispatcher ) {}
DisplayModeKHR( DisplayModeKHR && rhs ) VULKAN_HPP_NOEXCEPT
: m_physicalDevice( VULKAN_HPP_NAMESPACE::exchange( rhs.m_physicalDevice, {} ) )
, m_displayModeKHR( VULKAN_HPP_NAMESPACE::exchange( rhs.m_displayModeKHR, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
{
}
DisplayModeKHR & operator=( DisplayModeKHR const & rhs )
{
m_displayModeKHR = rhs.m_displayModeKHR;
m_dispatcher = rhs.m_dispatcher;
return *this;
}
DisplayModeKHR & operator=( DisplayModeKHR && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
std::swap( m_physicalDevice, rhs.m_physicalDevice );
std::swap( m_displayModeKHR, rhs.m_displayModeKHR );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
return *this;
}
VULKAN_HPP_NAMESPACE::DisplayModeKHR const & operator*() const VULKAN_HPP_NOEXCEPT
{
return m_displayModeKHR;
}
operator VULKAN_HPP_NAMESPACE::DisplayModeKHR() const VULKAN_HPP_NOEXCEPT
{
return m_displayModeKHR;
}
void clear() VULKAN_HPP_NOEXCEPT
{
m_physicalDevice = nullptr;
m_displayModeKHR = nullptr;
m_dispatcher = nullptr;
}
VULKAN_HPP_NAMESPACE::DisplayModeKHR release()
{
m_physicalDevice = nullptr;
m_dispatcher = nullptr;
return VULKAN_HPP_NAMESPACE::exchange( m_displayModeKHR, nullptr );
}
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::InstanceDispatcher const * getDispatcher() const
{
VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
return m_dispatcher;
}
void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayModeKHR & rhs ) VULKAN_HPP_NOEXCEPT
{
std::swap( m_physicalDevice, rhs.m_physicalDevice );
std::swap( m_displayModeKHR, rhs.m_displayModeKHR );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
//=== VK_KHR_display ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR getDisplayPlaneCapabilities( uint32_t planeIndex ) const;
private:
VULKAN_HPP_NAMESPACE::PhysicalDevice m_physicalDevice = {};
VULKAN_HPP_NAMESPACE::DisplayModeKHR m_displayModeKHR = {};
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::InstanceDispatcher const * m_dispatcher = nullptr;
};
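    // Illustrative usage sketch (not part of the generated API; `display`, `modeCreateInfo` and `planeIndex`
    // are assumed):
    //   vk::raii::DisplayModeKHR mode( display, modeCreateInfo );
    //   vk::DisplayPlaneCapabilitiesKHR capabilities = mode.getDisplayPlaneCapabilities( planeIndex );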
class Event
{
public:
using CType = VkEvent;
using CppType = VULKAN_HPP_NAMESPACE::Event;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eEvent;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eEvent;
public:
# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
Event( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VULKAN_HPP_NAMESPACE::EventCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
{
*this = device.createEvent( createInfo, allocator );
}
# endif
Event( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VkEvent event,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_device( device )
, m_event( event )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
{
}
Event( std::nullptr_t ) {}
~Event()
{
clear();
}
Event() = delete;
Event( Event const & ) = delete;
Event( Event && rhs ) VULKAN_HPP_NOEXCEPT
: m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) )
, m_event( VULKAN_HPP_NAMESPACE::exchange( rhs.m_event, {} ) )
, m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
{
}
Event & operator=( Event const & ) = delete;
Event & operator=( Event && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
std::swap( m_device, rhs.m_device );
std::swap( m_event, rhs.m_event );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
return *this;
}
VULKAN_HPP_NAMESPACE::Event const & operator*() const VULKAN_HPP_NOEXCEPT
{
return m_event;
}
operator VULKAN_HPP_NAMESPACE::Event() const VULKAN_HPP_NOEXCEPT
{
return m_event;
}
void clear() VULKAN_HPP_NOEXCEPT
{
if ( m_event )
{
getDispatcher()->vkDestroyEvent(
static_cast<VkDevice>( m_device ), static_cast<VkEvent>( m_event ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
}
m_device = nullptr;
m_event = nullptr;
m_allocator = nullptr;
m_dispatcher = nullptr;
}
VULKAN_HPP_NAMESPACE::Event release()
{
m_device = nullptr;
m_allocator = nullptr;
m_dispatcher = nullptr;
return VULKAN_HPP_NAMESPACE::exchange( m_event, nullptr );
}
VULKAN_HPP_NAMESPACE::Device getDevice() const
{
return m_device;
}
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const
{
VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
return m_dispatcher;
}
void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Event & rhs ) VULKAN_HPP_NOEXCEPT
{
std::swap( m_device, rhs.m_device );
std::swap( m_event, rhs.m_event );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
//=== VK_VERSION_1_0 ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result getStatus() const;
void set() const;
void reset() const;
private:
VULKAN_HPP_NAMESPACE::Device m_device = {};
VULKAN_HPP_NAMESPACE::Event m_event = {};
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr;
};
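    // Illustrative usage sketch (not part of the generated API; `device` is assumed):
    //   vk::raii::Event event( device, vk::EventCreateInfo{} );
    //   event.set();
    //   if ( event.getStatus() == vk::Result::eEventSet )
    //   {
    //     event.reset();
    //   }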
class Fence
{
public:
using CType = VkFence;
using CppType = VULKAN_HPP_NAMESPACE::Fence;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eFence;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eFence;
public:
# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
Fence( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VULKAN_HPP_NAMESPACE::FenceCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
{
*this = device.createFence( createInfo, allocator );
}
# endif
# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
Fence( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT const & deviceEventInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
{
*this = device.registerEventEXT( deviceEventInfo, allocator );
}
# endif
# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
Fence( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayKHR const & display,
VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT const & displayEventInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
{
*this = device.registerDisplayEventEXT( display, displayEventInfo, allocator );
}
# endif
Fence( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VkFence fence,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_device( device )
, m_fence( fence )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
{
}
Fence( std::nullptr_t ) {}
~Fence()
{
clear();
}
Fence() = delete;
Fence( Fence const & ) = delete;
Fence( Fence && rhs ) VULKAN_HPP_NOEXCEPT
: m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) )
, m_fence( VULKAN_HPP_NAMESPACE::exchange( rhs.m_fence, {} ) )
, m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
{
}
Fence & operator=( Fence const & ) = delete;
Fence & operator=( Fence && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
std::swap( m_device, rhs.m_device );
std::swap( m_fence, rhs.m_fence );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
return *this;
}
VULKAN_HPP_NAMESPACE::Fence const & operator*() const VULKAN_HPP_NOEXCEPT
{
return m_fence;
}
operator VULKAN_HPP_NAMESPACE::Fence() const VULKAN_HPP_NOEXCEPT
{
return m_fence;
}
void clear() VULKAN_HPP_NOEXCEPT
{
if ( m_fence )
{
getDispatcher()->vkDestroyFence(
static_cast<VkDevice>( m_device ), static_cast<VkFence>( m_fence ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
}
m_device = nullptr;
m_fence = nullptr;
m_allocator = nullptr;
m_dispatcher = nullptr;
}
VULKAN_HPP_NAMESPACE::Fence release()
{
m_device = nullptr;
m_allocator = nullptr;
m_dispatcher = nullptr;
return VULKAN_HPP_NAMESPACE::exchange( m_fence, nullptr );
}
VULKAN_HPP_NAMESPACE::Device getDevice() const
{
return m_device;
}
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const
{
VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
return m_dispatcher;
}
void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Fence & rhs ) VULKAN_HPP_NOEXCEPT
{
std::swap( m_device, rhs.m_device );
std::swap( m_fence, rhs.m_fence );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
//=== VK_VERSION_1_0 ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result getStatus() const;
private:
VULKAN_HPP_NAMESPACE::Device m_device = {};
VULKAN_HPP_NAMESPACE::Fence m_fence = {};
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr;
};
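    // Illustrative usage sketch (not part of the generated API; `device` is assumed):
    //   vk::raii::Fence fence( device, vk::FenceCreateInfo{ vk::FenceCreateFlagBits::eSignaled } );
    //   if ( fence.getStatus() == vk::Result::eSuccess )  // eSuccess == signaled, eNotReady == unsignaled
    //   {
    //     device.resetFences( *fence );
    //   }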
class Framebuffer
{
public:
using CType = VkFramebuffer;
using CppType = VULKAN_HPP_NAMESPACE::Framebuffer;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eFramebuffer;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eFramebuffer;
public:
# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
Framebuffer( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VULKAN_HPP_NAMESPACE::FramebufferCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
{
*this = device.createFramebuffer( createInfo, allocator );
}
# endif
Framebuffer( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VkFramebuffer framebuffer,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_device( device )
, m_framebuffer( framebuffer )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
{
}
Framebuffer( std::nullptr_t ) {}
~Framebuffer()
{
clear();
}
Framebuffer() = delete;
Framebuffer( Framebuffer const & ) = delete;
Framebuffer( Framebuffer && rhs ) VULKAN_HPP_NOEXCEPT
: m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) )
, m_framebuffer( VULKAN_HPP_NAMESPACE::exchange( rhs.m_framebuffer, {} ) )
, m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
{
}
Framebuffer & operator=( Framebuffer const & ) = delete;
Framebuffer & operator=( Framebuffer && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
std::swap( m_device, rhs.m_device );
std::swap( m_framebuffer, rhs.m_framebuffer );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
return *this;
}
VULKAN_HPP_NAMESPACE::Framebuffer const & operator*() const VULKAN_HPP_NOEXCEPT
{
return m_framebuffer;
}
operator VULKAN_HPP_NAMESPACE::Framebuffer() const VULKAN_HPP_NOEXCEPT
{
return m_framebuffer;
}
void clear() VULKAN_HPP_NOEXCEPT
{
if ( m_framebuffer )
{
getDispatcher()->vkDestroyFramebuffer(
static_cast<VkDevice>( m_device ), static_cast<VkFramebuffer>( m_framebuffer ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
}
m_device = nullptr;
m_framebuffer = nullptr;
m_allocator = nullptr;
m_dispatcher = nullptr;
}
VULKAN_HPP_NAMESPACE::Framebuffer release()
{
m_device = nullptr;
m_allocator = nullptr;
m_dispatcher = nullptr;
return VULKAN_HPP_NAMESPACE::exchange( m_framebuffer, nullptr );
}
VULKAN_HPP_NAMESPACE::Device getDevice() const
{
return m_device;
}
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const
{
VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
return m_dispatcher;
}
void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Framebuffer & rhs ) VULKAN_HPP_NOEXCEPT
{
std::swap( m_device, rhs.m_device );
std::swap( m_framebuffer, rhs.m_framebuffer );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
//=== VK_QCOM_tile_properties ===
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::TilePropertiesQCOM> getTilePropertiesQCOM() const;
private:
VULKAN_HPP_NAMESPACE::Device m_device = {};
VULKAN_HPP_NAMESPACE::Framebuffer m_framebuffer = {};
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr;
};
class Image
{
public:
using CType = VkImage;
using CppType = VULKAN_HPP_NAMESPACE::Image;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eImage;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eImage;
public:
# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
Image( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VULKAN_HPP_NAMESPACE::ImageCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
{
*this = device.createImage( createInfo, allocator );
}
# endif
Image( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VkImage image,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_device( device )
, m_image( image )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
{
}
Image( std::nullptr_t ) {}
~Image()
{
clear();
}
Image() = delete;
Image( Image const & ) = delete;
Image( Image && rhs ) VULKAN_HPP_NOEXCEPT
: m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) )
, m_image( VULKAN_HPP_NAMESPACE::exchange( rhs.m_image, {} ) )
, m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
{
}
Image & operator=( Image const & ) = delete;
Image & operator=( Image && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
std::swap( m_device, rhs.m_device );
std::swap( m_image, rhs.m_image );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
return *this;
}
VULKAN_HPP_NAMESPACE::Image const & operator*() const VULKAN_HPP_NOEXCEPT
{
return m_image;
}
operator VULKAN_HPP_NAMESPACE::Image() const VULKAN_HPP_NOEXCEPT
{
return m_image;
}
void clear() VULKAN_HPP_NOEXCEPT
{
if ( m_image )
{
getDispatcher()->vkDestroyImage(
static_cast<VkDevice>( m_device ), static_cast<VkImage>( m_image ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
}
m_device = nullptr;
m_image = nullptr;
m_allocator = nullptr;
m_dispatcher = nullptr;
}
VULKAN_HPP_NAMESPACE::Image release()
{
m_device = nullptr;
m_allocator = nullptr;
m_dispatcher = nullptr;
return VULKAN_HPP_NAMESPACE::exchange( m_image, nullptr );
}
VULKAN_HPP_NAMESPACE::Device getDevice() const
{
return m_device;
}
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const
{
VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
return m_dispatcher;
}
void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Image & rhs ) VULKAN_HPP_NOEXCEPT
{
std::swap( m_device, rhs.m_device );
std::swap( m_image, rhs.m_image );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
//=== VK_VERSION_1_0 ===
void bindMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset ) const;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements getMemoryRequirements() const VULKAN_HPP_NOEXCEPT;
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements> getSparseMemoryRequirements() const;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout
getSubresourceLayout( const VULKAN_HPP_NAMESPACE::ImageSubresource & subresource ) const VULKAN_HPP_NOEXCEPT;
//=== VK_EXT_image_drm_format_modifier ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT getDrmFormatModifierPropertiesEXT() const;
//=== VK_EXT_host_image_copy ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR
getSubresourceLayout2EXT( const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource ) const VULKAN_HPP_NOEXCEPT;
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
getSubresourceLayout2EXT( const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource ) const VULKAN_HPP_NOEXCEPT;
//=== VK_KHR_maintenance5 ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR
getSubresourceLayout2KHR( const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource ) const VULKAN_HPP_NOEXCEPT;
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
getSubresourceLayout2KHR( const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource ) const VULKAN_HPP_NOEXCEPT;
private:
VULKAN_HPP_NAMESPACE::Device m_device = {};
VULKAN_HPP_NAMESPACE::Image m_image = {};
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr;
};
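    // Illustrative usage sketch (not part of the generated API; `device`, `imageCreateInfo` and a compatible
    // vk::raii::DeviceMemory named `memory` are assumed):
    //   vk::raii::Image image( device, imageCreateInfo );
    //   vk::MemoryRequirements requirements = image.getMemoryRequirements();
    //   image.bindMemory( *memory, 0 );  // bind at offset 0 of the allocation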
class ImageView
{
public:
using CType = VkImageView;
using CppType = VULKAN_HPP_NAMESPACE::ImageView;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eImageView;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eImageView;
public:
# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
ImageView( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VULKAN_HPP_NAMESPACE::ImageViewCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
{
*this = device.createImageView( createInfo, allocator );
}
# endif
ImageView( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VkImageView imageView,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_device( device )
, m_imageView( imageView )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
{
}
ImageView( std::nullptr_t ) {}
~ImageView()
{
clear();
}
ImageView() = delete;
ImageView( ImageView const & ) = delete;
ImageView( ImageView && rhs ) VULKAN_HPP_NOEXCEPT
: m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) )
, m_imageView( VULKAN_HPP_NAMESPACE::exchange( rhs.m_imageView, {} ) )
, m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
{
}
ImageView & operator=( ImageView const & ) = delete;
ImageView & operator=( ImageView && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
std::swap( m_device, rhs.m_device );
std::swap( m_imageView, rhs.m_imageView );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
return *this;
}
VULKAN_HPP_NAMESPACE::ImageView const & operator*() const VULKAN_HPP_NOEXCEPT
{
return m_imageView;
}
operator VULKAN_HPP_NAMESPACE::ImageView() const VULKAN_HPP_NOEXCEPT
{
return m_imageView;
}
void clear() VULKAN_HPP_NOEXCEPT
{
if ( m_imageView )
{
getDispatcher()->vkDestroyImageView(
static_cast<VkDevice>( m_device ), static_cast<VkImageView>( m_imageView ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
}
m_device = nullptr;
m_imageView = nullptr;
m_allocator = nullptr;
m_dispatcher = nullptr;
}
VULKAN_HPP_NAMESPACE::ImageView release()
{
m_device = nullptr;
m_allocator = nullptr;
m_dispatcher = nullptr;
return VULKAN_HPP_NAMESPACE::exchange( m_imageView, nullptr );
}
VULKAN_HPP_NAMESPACE::Device getDevice() const
{
return m_device;
}
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const
{
VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
return m_dispatcher;
}
void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ImageView & rhs ) VULKAN_HPP_NOEXCEPT
{
std::swap( m_device, rhs.m_device );
std::swap( m_imageView, rhs.m_imageView );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
//=== VK_NVX_image_view_handle ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX getAddressNVX() const;
private:
VULKAN_HPP_NAMESPACE::Device m_device = {};
VULKAN_HPP_NAMESPACE::ImageView m_imageView = {};
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr;
};
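    // Illustrative usage sketch (not part of the generated API; `device` and `imageViewCreateInfo` are assumed):
    //   vk::raii::ImageView imageView( device, imageViewCreateInfo );
    //   // with VK_NVX_image_view_handle enabled:
    //   vk::ImageViewAddressPropertiesNVX addressProperties = imageView.getAddressNVX();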
class IndirectCommandsLayoutEXT
{
public:
using CType = VkIndirectCommandsLayoutEXT;
using CppType = VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eIndirectCommandsLayoutEXT;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
public:
# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
IndirectCommandsLayoutEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoEXT const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
{
*this = device.createIndirectCommandsLayoutEXT( createInfo, allocator );
}
# endif
IndirectCommandsLayoutEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VkIndirectCommandsLayoutEXT indirectCommandsLayout,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_device( device )
, m_indirectCommandsLayout( indirectCommandsLayout )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
{
}
IndirectCommandsLayoutEXT( std::nullptr_t ) {}
~IndirectCommandsLayoutEXT()
{
clear();
}
IndirectCommandsLayoutEXT() = delete;
IndirectCommandsLayoutEXT( IndirectCommandsLayoutEXT const & ) = delete;
IndirectCommandsLayoutEXT( IndirectCommandsLayoutEXT && rhs ) VULKAN_HPP_NOEXCEPT
: m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) )
, m_indirectCommandsLayout( VULKAN_HPP_NAMESPACE::exchange( rhs.m_indirectCommandsLayout, {} ) )
, m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
{
}
IndirectCommandsLayoutEXT & operator=( IndirectCommandsLayoutEXT const & ) = delete;
IndirectCommandsLayoutEXT & operator=( IndirectCommandsLayoutEXT && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
std::swap( m_device, rhs.m_device );
std::swap( m_indirectCommandsLayout, rhs.m_indirectCommandsLayout );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
return *this;
}
VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT const & operator*() const VULKAN_HPP_NOEXCEPT
{
return m_indirectCommandsLayout;
}
operator VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT() const VULKAN_HPP_NOEXCEPT
{
return m_indirectCommandsLayout;
}
void clear() VULKAN_HPP_NOEXCEPT
{
if ( m_indirectCommandsLayout )
{
getDispatcher()->vkDestroyIndirectCommandsLayoutEXT( static_cast<VkDevice>( m_device ),
static_cast<VkIndirectCommandsLayoutEXT>( m_indirectCommandsLayout ),
reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
}
m_device = nullptr;
m_indirectCommandsLayout = nullptr;
m_allocator = nullptr;
m_dispatcher = nullptr;
}
VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT release()
{
m_device = nullptr;
m_allocator = nullptr;
m_dispatcher = nullptr;
return VULKAN_HPP_NAMESPACE::exchange( m_indirectCommandsLayout, nullptr );
}
VULKAN_HPP_NAMESPACE::Device getDevice() const
{
return m_device;
}
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const
{
VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
return m_dispatcher;
}
void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::IndirectCommandsLayoutEXT & rhs ) VULKAN_HPP_NOEXCEPT
{
std::swap( m_device, rhs.m_device );
std::swap( m_indirectCommandsLayout, rhs.m_indirectCommandsLayout );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
private:
VULKAN_HPP_NAMESPACE::Device m_device = {};
VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT m_indirectCommandsLayout = {};
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr;
};
class IndirectCommandsLayoutNV
{
public:
using CType = VkIndirectCommandsLayoutNV;
using CppType = VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eIndirectCommandsLayoutNV;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
public:
# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
IndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
{
*this = device.createIndirectCommandsLayoutNV( createInfo, allocator );
}
# endif
IndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VkIndirectCommandsLayoutNV indirectCommandsLayout,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_device( device )
, m_indirectCommandsLayout( indirectCommandsLayout )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
{
}
IndirectCommandsLayoutNV( std::nullptr_t ) {}
~IndirectCommandsLayoutNV()
{
clear();
}
IndirectCommandsLayoutNV() = delete;
IndirectCommandsLayoutNV( IndirectCommandsLayoutNV const & ) = delete;
IndirectCommandsLayoutNV( IndirectCommandsLayoutNV && rhs ) VULKAN_HPP_NOEXCEPT
: m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) )
, m_indirectCommandsLayout( VULKAN_HPP_NAMESPACE::exchange( rhs.m_indirectCommandsLayout, {} ) )
, m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
{
}
IndirectCommandsLayoutNV & operator=( IndirectCommandsLayoutNV const & ) = delete;
IndirectCommandsLayoutNV & operator=( IndirectCommandsLayoutNV && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
std::swap( m_device, rhs.m_device );
std::swap( m_indirectCommandsLayout, rhs.m_indirectCommandsLayout );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
return *this;
}
VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV const & operator*() const VULKAN_HPP_NOEXCEPT
{
return m_indirectCommandsLayout;
}
operator VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV() const VULKAN_HPP_NOEXCEPT
{
return m_indirectCommandsLayout;
}
void clear() VULKAN_HPP_NOEXCEPT
{
if ( m_indirectCommandsLayout )
{
getDispatcher()->vkDestroyIndirectCommandsLayoutNV( static_cast<VkDevice>( m_device ),
static_cast<VkIndirectCommandsLayoutNV>( m_indirectCommandsLayout ),
reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
}
m_device = nullptr;
m_indirectCommandsLayout = nullptr;
m_allocator = nullptr;
m_dispatcher = nullptr;
}
VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV release()
{
m_device = nullptr;
m_allocator = nullptr;
m_dispatcher = nullptr;
return VULKAN_HPP_NAMESPACE::exchange( m_indirectCommandsLayout, nullptr );
}
VULKAN_HPP_NAMESPACE::Device getDevice() const
{
return m_device;
}
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const
{
VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
return m_dispatcher;
}
void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::IndirectCommandsLayoutNV & rhs ) VULKAN_HPP_NOEXCEPT
{
std::swap( m_device, rhs.m_device );
std::swap( m_indirectCommandsLayout, rhs.m_indirectCommandsLayout );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
private:
VULKAN_HPP_NAMESPACE::Device m_device = {};
VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV m_indirectCommandsLayout = {};
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr;
};
class IndirectExecutionSetEXT
{
public:
using CType = VkIndirectExecutionSetEXT;
using CppType = VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eIndirectExecutionSetEXT;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
public:
# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
IndirectExecutionSetEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VULKAN_HPP_NAMESPACE::IndirectExecutionSetCreateInfoEXT const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
{
*this = device.createIndirectExecutionSetEXT( createInfo, allocator );
}
# endif
IndirectExecutionSetEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VkIndirectExecutionSetEXT indirectExecutionSet,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_device( device )
, m_indirectExecutionSet( indirectExecutionSet )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
{
}
IndirectExecutionSetEXT( std::nullptr_t ) {}
~IndirectExecutionSetEXT()
{
clear();
}
IndirectExecutionSetEXT() = delete;
IndirectExecutionSetEXT( IndirectExecutionSetEXT const & ) = delete;
IndirectExecutionSetEXT( IndirectExecutionSetEXT && rhs ) VULKAN_HPP_NOEXCEPT
: m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) )
, m_indirectExecutionSet( VULKAN_HPP_NAMESPACE::exchange( rhs.m_indirectExecutionSet, {} ) )
, m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
{
}
IndirectExecutionSetEXT & operator=( IndirectExecutionSetEXT const & ) = delete;
IndirectExecutionSetEXT & operator=( IndirectExecutionSetEXT && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
std::swap( m_device, rhs.m_device );
std::swap( m_indirectExecutionSet, rhs.m_indirectExecutionSet );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
return *this;
}
VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT const & operator*() const VULKAN_HPP_NOEXCEPT
{
return m_indirectExecutionSet;
}
operator VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT() const VULKAN_HPP_NOEXCEPT
{
return m_indirectExecutionSet;
}
void clear() VULKAN_HPP_NOEXCEPT
{
if ( m_indirectExecutionSet )
{
getDispatcher()->vkDestroyIndirectExecutionSetEXT( static_cast<VkDevice>( m_device ),
static_cast<VkIndirectExecutionSetEXT>( m_indirectExecutionSet ),
reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
}
m_device = nullptr;
m_indirectExecutionSet = nullptr;
m_allocator = nullptr;
m_dispatcher = nullptr;
}
VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT release()
{
m_device = nullptr;
m_allocator = nullptr;
m_dispatcher = nullptr;
return VULKAN_HPP_NAMESPACE::exchange( m_indirectExecutionSet, nullptr );
}
VULKAN_HPP_NAMESPACE::Device getDevice() const
{
return m_device;
}
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const
{
VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
return m_dispatcher;
}
void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::IndirectExecutionSetEXT & rhs ) VULKAN_HPP_NOEXCEPT
{
std::swap( m_device, rhs.m_device );
std::swap( m_indirectExecutionSet, rhs.m_indirectExecutionSet );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
//=== VK_EXT_device_generated_commands ===
void updatePipeline( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteIndirectExecutionSetPipelineEXT> const & executionSetWrites ) const
VULKAN_HPP_NOEXCEPT;
void updateShader( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteIndirectExecutionSetShaderEXT> const & executionSetWrites ) const
VULKAN_HPP_NOEXCEPT;
private:
VULKAN_HPP_NAMESPACE::Device m_device = {};
VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT m_indirectExecutionSet = {};
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr;
};
class MicromapEXT
{
public:
using CType = VkMicromapEXT;
using CppType = VULKAN_HPP_NAMESPACE::MicromapEXT;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eMicromapEXT;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
public:
# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
MicromapEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
{
*this = device.createMicromapEXT( createInfo, allocator );
}
# endif
MicromapEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VkMicromapEXT micromap,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_device( device )
, m_micromap( micromap )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
{
}
MicromapEXT( std::nullptr_t ) {}
~MicromapEXT()
{
clear();
}
MicromapEXT() = delete;
MicromapEXT( MicromapEXT const & ) = delete;
MicromapEXT( MicromapEXT && rhs ) VULKAN_HPP_NOEXCEPT
: m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) )
, m_micromap( VULKAN_HPP_NAMESPACE::exchange( rhs.m_micromap, {} ) )
, m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
{
}
MicromapEXT & operator=( MicromapEXT const & ) = delete;
MicromapEXT & operator=( MicromapEXT && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
std::swap( m_device, rhs.m_device );
std::swap( m_micromap, rhs.m_micromap );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
return *this;
}
VULKAN_HPP_NAMESPACE::MicromapEXT const & operator*() const VULKAN_HPP_NOEXCEPT
{
return m_micromap;
}
operator VULKAN_HPP_NAMESPACE::MicromapEXT() const VULKAN_HPP_NOEXCEPT
{
return m_micromap;
}
void clear() VULKAN_HPP_NOEXCEPT
{
if ( m_micromap )
{
getDispatcher()->vkDestroyMicromapEXT(
static_cast<VkDevice>( m_device ), static_cast<VkMicromapEXT>( m_micromap ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
}
m_device = nullptr;
m_micromap = nullptr;
m_allocator = nullptr;
m_dispatcher = nullptr;
}
VULKAN_HPP_NAMESPACE::MicromapEXT release()
{
m_device = nullptr;
m_allocator = nullptr;
m_dispatcher = nullptr;
return VULKAN_HPP_NAMESPACE::exchange( m_micromap, nullptr );
}
VULKAN_HPP_NAMESPACE::Device getDevice() const
{
return m_device;
}
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const
{
VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
return m_dispatcher;
}
void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::MicromapEXT & rhs ) VULKAN_HPP_NOEXCEPT
{
std::swap( m_device, rhs.m_device );
std::swap( m_micromap, rhs.m_micromap );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
private:
VULKAN_HPP_NAMESPACE::Device m_device = {};
VULKAN_HPP_NAMESPACE::MicromapEXT m_micromap = {};
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr;
};
class OpticalFlowSessionNV
{
public:
using CType = VkOpticalFlowSessionNV;
using CppType = VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eOpticalFlowSessionNV;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
public:
# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
OpticalFlowSessionNV( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
{
*this = device.createOpticalFlowSessionNV( createInfo, allocator );
}
# endif
OpticalFlowSessionNV( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VkOpticalFlowSessionNV session,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_device( device )
, m_session( session )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
{
}
OpticalFlowSessionNV( std::nullptr_t ) {}
~OpticalFlowSessionNV()
{
clear();
}
OpticalFlowSessionNV() = delete;
OpticalFlowSessionNV( OpticalFlowSessionNV const & ) = delete;
OpticalFlowSessionNV( OpticalFlowSessionNV && rhs ) VULKAN_HPP_NOEXCEPT
: m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) )
, m_session( VULKAN_HPP_NAMESPACE::exchange( rhs.m_session, {} ) )
, m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
{
}
OpticalFlowSessionNV & operator=( OpticalFlowSessionNV const & ) = delete;
OpticalFlowSessionNV & operator=( OpticalFlowSessionNV && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
std::swap( m_device, rhs.m_device );
std::swap( m_session, rhs.m_session );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
return *this;
}
VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV const & operator*() const VULKAN_HPP_NOEXCEPT
{
return m_session;
}
operator VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV() const VULKAN_HPP_NOEXCEPT
{
return m_session;
}
void clear() VULKAN_HPP_NOEXCEPT
{
if ( m_session )
{
getDispatcher()->vkDestroyOpticalFlowSessionNV( static_cast<VkDevice>( m_device ),
static_cast<VkOpticalFlowSessionNV>( m_session ),
reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
}
m_device = nullptr;
m_session = nullptr;
m_allocator = nullptr;
m_dispatcher = nullptr;
}
VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV release()
{
m_device = nullptr;
m_allocator = nullptr;
m_dispatcher = nullptr;
return VULKAN_HPP_NAMESPACE::exchange( m_session, nullptr );
}
VULKAN_HPP_NAMESPACE::Device getDevice() const
{
return m_device;
}
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const
{
VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
return m_dispatcher;
}
void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::OpticalFlowSessionNV & rhs ) VULKAN_HPP_NOEXCEPT
{
std::swap( m_device, rhs.m_device );
std::swap( m_session, rhs.m_session );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
//=== VK_NV_optical_flow ===
void bindImage( VULKAN_HPP_NAMESPACE::OpticalFlowSessionBindingPointNV bindingPoint,
VULKAN_HPP_NAMESPACE::ImageView view,
VULKAN_HPP_NAMESPACE::ImageLayout layout ) const;
private:
VULKAN_HPP_NAMESPACE::Device m_device = {};
VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV m_session = {};
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr;
};
class PerformanceConfigurationINTEL
{
public:
using CType = VkPerformanceConfigurationINTEL;
using CppType = VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePerformanceConfigurationINTEL;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
public:
# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
PerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL const & acquireInfo )
{
*this = device.acquirePerformanceConfigurationINTEL( acquireInfo );
}
# endif
PerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VkPerformanceConfigurationINTEL configuration )
: m_device( device ), m_configuration( configuration ), m_dispatcher( device.getDispatcher() )
{
}
PerformanceConfigurationINTEL( std::nullptr_t ) {}
~PerformanceConfigurationINTEL()
{
clear();
}
PerformanceConfigurationINTEL() = delete;
PerformanceConfigurationINTEL( PerformanceConfigurationINTEL const & ) = delete;
PerformanceConfigurationINTEL( PerformanceConfigurationINTEL && rhs ) VULKAN_HPP_NOEXCEPT
: m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) )
, m_configuration( VULKAN_HPP_NAMESPACE::exchange( rhs.m_configuration, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
{
}
PerformanceConfigurationINTEL & operator=( PerformanceConfigurationINTEL const & ) = delete;
PerformanceConfigurationINTEL & operator=( PerformanceConfigurationINTEL && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
std::swap( m_device, rhs.m_device );
std::swap( m_configuration, rhs.m_configuration );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
return *this;
}
VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL const & operator*() const VULKAN_HPP_NOEXCEPT
{
return m_configuration;
}
operator VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL() const VULKAN_HPP_NOEXCEPT
{
return m_configuration;
}
void clear() VULKAN_HPP_NOEXCEPT
{
if ( m_configuration )
{
getDispatcher()->vkReleasePerformanceConfigurationINTEL( static_cast<VkDevice>( m_device ),
static_cast<VkPerformanceConfigurationINTEL>( m_configuration ) );
}
m_device = nullptr;
m_configuration = nullptr;
m_dispatcher = nullptr;
}
VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL release()
{
m_device = nullptr;
m_dispatcher = nullptr;
return VULKAN_HPP_NAMESPACE::exchange( m_configuration, nullptr );
}
VULKAN_HPP_NAMESPACE::Device getDevice() const
{
return m_device;
}
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const
{
VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
return m_dispatcher;
}
void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PerformanceConfigurationINTEL & rhs ) VULKAN_HPP_NOEXCEPT
{
std::swap( m_device, rhs.m_device );
std::swap( m_configuration, rhs.m_configuration );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
private:
VULKAN_HPP_NAMESPACE::Device m_device = {};
VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL m_configuration = {};
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr;
};
class PipelineCache
{
public:
using CType = VkPipelineCache;
using CppType = VULKAN_HPP_NAMESPACE::PipelineCache;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePipelineCache;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipelineCache;
public:
# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
PipelineCache( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
{
*this = device.createPipelineCache( createInfo, allocator );
}
# endif
PipelineCache( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VkPipelineCache pipelineCache,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_device( device )
, m_pipelineCache( pipelineCache )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
{
}
PipelineCache( std::nullptr_t ) {}
~PipelineCache()
{
clear();
}
PipelineCache() = delete;
PipelineCache( PipelineCache const & ) = delete;
PipelineCache( PipelineCache && rhs ) VULKAN_HPP_NOEXCEPT
: m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) )
, m_pipelineCache( VULKAN_HPP_NAMESPACE::exchange( rhs.m_pipelineCache, {} ) )
, m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
{
}
PipelineCache & operator=( PipelineCache const & ) = delete;
PipelineCache & operator=( PipelineCache && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
std::swap( m_device, rhs.m_device );
std::swap( m_pipelineCache, rhs.m_pipelineCache );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
return *this;
}
VULKAN_HPP_NAMESPACE::PipelineCache const & operator*() const VULKAN_HPP_NOEXCEPT
{
return m_pipelineCache;
}
operator VULKAN_HPP_NAMESPACE::PipelineCache() const VULKAN_HPP_NOEXCEPT
{
return m_pipelineCache;
}
void clear() VULKAN_HPP_NOEXCEPT
{
if ( m_pipelineCache )
{
getDispatcher()->vkDestroyPipelineCache( static_cast<VkDevice>( m_device ),
static_cast<VkPipelineCache>( m_pipelineCache ),
reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
}
m_device = nullptr;
m_pipelineCache = nullptr;
m_allocator = nullptr;
m_dispatcher = nullptr;
}
VULKAN_HPP_NAMESPACE::PipelineCache release()
{
m_device = nullptr;
m_allocator = nullptr;
m_dispatcher = nullptr;
return VULKAN_HPP_NAMESPACE::exchange( m_pipelineCache, nullptr );
}
VULKAN_HPP_NAMESPACE::Device getDevice() const
{
return m_device;
}
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const
{
VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
return m_dispatcher;
}
void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache & rhs ) VULKAN_HPP_NOEXCEPT
{
std::swap( m_device, rhs.m_device );
std::swap( m_pipelineCache, rhs.m_pipelineCache );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
//=== VK_VERSION_1_0 ===
VULKAN_HPP_NODISCARD std::vector<uint8_t> getData() const;
void merge( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::PipelineCache> const & srcCaches ) const;
private:
VULKAN_HPP_NAMESPACE::Device m_device = {};
VULKAN_HPP_NAMESPACE::PipelineCache m_pipelineCache = {};
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr;
};
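// Usage sketch (illustrative comment, not part of the generated API): creating a RAII pipeline
// cache and retrieving its data via the getData() member declared above. Assumes the default
// namespace aliases ( vk:: / vk::raii:: ) and an already constructed vk::raii::Device `device`.
//
//   vk::raii::PipelineCache cache( device, vk::PipelineCacheCreateInfo{} );
//   std::vector<uint8_t> blob = cache.getData();  // opaque, driver-specific bytes
//   // `blob` can be persisted and fed back through vk::PipelineCacheCreateInfo::pInitialData
//   // on the next run; the cache is destroyed automatically when `cache` goes out of scope.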
class Pipeline
{
public:
using CType = VkPipeline;
using CppType = VULKAN_HPP_NAMESPACE::Pipeline;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePipeline;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipeline;
public:
# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
Pipeline( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
{
*this = device.createComputePipeline( pipelineCache, createInfo, allocator );
}
# endif
# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
# if defined( VK_ENABLE_BETA_EXTENSIONS )
Pipeline( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
{
*this = device.createExecutionGraphPipelineAMDX( pipelineCache, createInfo, allocator );
}
# endif /*VK_ENABLE_BETA_EXTENSIONS*/
# endif
# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
Pipeline( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
{
*this = device.createGraphicsPipeline( pipelineCache, createInfo, allocator );
}
# endif
# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
Pipeline( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeferredOperationKHR> const & deferredOperation,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
{
*this = device.createRayTracingPipelineKHR( deferredOperation, pipelineCache, createInfo, allocator );
}
# endif
# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
Pipeline( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
{
*this = device.createRayTracingPipelineNV( pipelineCache, createInfo, allocator );
}
# endif
Pipeline( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VkPipeline pipeline,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr,
VULKAN_HPP_NAMESPACE::Result successCode = VULKAN_HPP_NAMESPACE::Result::eSuccess )
: m_device( device )
, m_pipeline( pipeline )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_constructorSuccessCode( successCode )
, m_dispatcher( device.getDispatcher() )
{
}
Pipeline( std::nullptr_t ) {}
~Pipeline()
{
clear();
}
Pipeline() = delete;
Pipeline( Pipeline const & ) = delete;
Pipeline( Pipeline && rhs ) VULKAN_HPP_NOEXCEPT
: m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) )
, m_pipeline( VULKAN_HPP_NAMESPACE::exchange( rhs.m_pipeline, {} ) )
, m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) )
, m_constructorSuccessCode( VULKAN_HPP_NAMESPACE::exchange( rhs.m_constructorSuccessCode, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
{
}
Pipeline & operator=( Pipeline const & ) = delete;
Pipeline & operator=( Pipeline && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
std::swap( m_device, rhs.m_device );
std::swap( m_pipeline, rhs.m_pipeline );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_constructorSuccessCode, rhs.m_constructorSuccessCode );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
return *this;
}
VULKAN_HPP_NAMESPACE::Pipeline const & operator*() const VULKAN_HPP_NOEXCEPT
{
return m_pipeline;
}
operator VULKAN_HPP_NAMESPACE::Pipeline() const VULKAN_HPP_NOEXCEPT
{
return m_pipeline;
}
void clear() VULKAN_HPP_NOEXCEPT
{
if ( m_pipeline )
{
getDispatcher()->vkDestroyPipeline(
static_cast<VkDevice>( m_device ), static_cast<VkPipeline>( m_pipeline ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
}
m_device = nullptr;
m_pipeline = nullptr;
m_allocator = nullptr;
m_constructorSuccessCode = VULKAN_HPP_NAMESPACE::Result::eErrorUnknown;
m_dispatcher = nullptr;
}
VULKAN_HPP_NAMESPACE::Pipeline release()
{
m_device = nullptr;
m_allocator = nullptr;
m_constructorSuccessCode = VULKAN_HPP_NAMESPACE::Result::eErrorUnknown;
m_dispatcher = nullptr;
return VULKAN_HPP_NAMESPACE::exchange( m_pipeline, nullptr );
}
VULKAN_HPP_NAMESPACE::Result getConstructorSuccessCode() const
{
return m_constructorSuccessCode;
}
VULKAN_HPP_NAMESPACE::Device getDevice() const
{
return m_device;
}
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const
{
VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
return m_dispatcher;
}
void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Pipeline & rhs ) VULKAN_HPP_NOEXCEPT
{
std::swap( m_device, rhs.m_device );
std::swap( m_pipeline, rhs.m_pipeline );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_constructorSuccessCode, rhs.m_constructorSuccessCode );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
//=== VK_AMD_shader_info ===
VULKAN_HPP_NODISCARD std::vector<uint8_t> getShaderInfoAMD( VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage,
VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType ) const;
# if defined( VK_ENABLE_BETA_EXTENSIONS )
//=== VK_AMDX_shader_enqueue ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX getExecutionGraphScratchSizeAMDX() const;
VULKAN_HPP_NODISCARD uint32_t getExecutionGraphNodeIndexAMDX( const VULKAN_HPP_NAMESPACE::PipelineShaderStageNodeCreateInfoAMDX & nodeInfo ) const;
# endif /*VK_ENABLE_BETA_EXTENSIONS*/
//=== VK_KHR_ray_tracing_pipeline ===
template <typename DataType>
VULKAN_HPP_NODISCARD std::vector<DataType> getRayTracingShaderGroupHandlesKHR( uint32_t firstGroup, uint32_t groupCount, size_t dataSize ) const;
template <typename DataType>
VULKAN_HPP_NODISCARD DataType getRayTracingShaderGroupHandleKHR( uint32_t firstGroup, uint32_t groupCount ) const;
template <typename DataType>
VULKAN_HPP_NODISCARD std::vector<DataType>
getRayTracingCaptureReplayShaderGroupHandlesKHR( uint32_t firstGroup, uint32_t groupCount, size_t dataSize ) const;
template <typename DataType>
VULKAN_HPP_NODISCARD DataType getRayTracingCaptureReplayShaderGroupHandleKHR( uint32_t firstGroup, uint32_t groupCount ) const;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceSize
getRayTracingShaderGroupStackSizeKHR( uint32_t group, VULKAN_HPP_NAMESPACE::ShaderGroupShaderKHR groupShader ) const VULKAN_HPP_NOEXCEPT;
//=== VK_NV_ray_tracing ===
template <typename DataType>
VULKAN_HPP_NODISCARD std::vector<DataType> getRayTracingShaderGroupHandlesNV( uint32_t firstGroup, uint32_t groupCount, size_t dataSize ) const;
template <typename DataType>
VULKAN_HPP_NODISCARD DataType getRayTracingShaderGroupHandleNV( uint32_t firstGroup, uint32_t groupCount ) const;
void compileDeferredNV( uint32_t shader ) const;
private:
VULKAN_HPP_NAMESPACE::Device m_device = {};
VULKAN_HPP_NAMESPACE::Pipeline m_pipeline = {};
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
VULKAN_HPP_NAMESPACE::Result m_constructorSuccessCode = VULKAN_HPP_NAMESPACE::Result::eErrorUnknown;
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr;
};
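// Usage sketch (illustrative comment, not part of the generated API): constructing a graphics
// pipeline through the RAII wrapper and checking the stored creation result. Assumes the default
// namespace aliases ( vk:: / vk::raii:: ), an existing vk::raii::Device `device`, a
// vk::raii::PipelineCache `cache`, and a fully populated vk::GraphicsPipelineCreateInfo
// `graphicsCreateInfo`.
//
//   vk::raii::Pipeline pipeline( device, cache, graphicsCreateInfo );
//   if ( pipeline.getConstructorSuccessCode() != vk::Result::eSuccess )
//   {
//     // e.g. vk::Result::ePipelineCompileRequiredEXT when that behavior was requested
//   }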
class Pipelines : public std::vector<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Pipeline>
{
public:
# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
Pipelines( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
{
*this = device.createComputePipelines( pipelineCache, createInfos, allocator );
}
# endif
# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
# if defined( VK_ENABLE_BETA_EXTENSIONS )
Pipelines( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX> const & createInfos,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
{
*this = device.createExecutionGraphPipelinesAMDX( pipelineCache, createInfos, allocator );
}
# endif /*VK_ENABLE_BETA_EXTENSIONS*/
# endif
# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
Pipelines( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
{
*this = device.createGraphicsPipelines( pipelineCache, createInfos, allocator );
}
# endif
# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
Pipelines( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeferredOperationKHR> const & deferredOperation,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
{
*this = device.createRayTracingPipelinesKHR( deferredOperation, pipelineCache, createInfos, allocator );
}
# endif
# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
Pipelines( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
{
*this = device.createRayTracingPipelinesNV( pipelineCache, createInfos, allocator );
}
# endif
Pipelines( std::nullptr_t ) {}
Pipelines() = delete;
Pipelines( Pipelines const & ) = delete;
Pipelines( Pipelines && rhs ) = default;
Pipelines & operator=( Pipelines const & ) = delete;
Pipelines & operator=( Pipelines && rhs ) = default;
private:
Pipelines( std::vector<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Pipeline> && rhs )
{
std::swap( *this, rhs );
}
};
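// Usage sketch (illustrative comment, not part of the generated API): batch-creating pipelines.
// Pipelines derives from std::vector<vk::raii::Pipeline>, so the result can be indexed and
// iterated directly. Assumes the default namespace aliases, an existing `device`, a `cache`,
// and a std::vector<vk::GraphicsPipelineCreateInfo> `createInfos`.
//
//   vk::raii::Pipelines pipelines( device, cache, createInfos );
//   for ( auto const & pipeline : pipelines )
//   {
//     // *pipeline yields the underlying vk::Pipeline handle for command buffer binding
//   }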
class PipelineBinaryKHR
{
public:
using CType = VkPipelineBinaryKHR;
using CppType = VULKAN_HPP_NAMESPACE::PipelineBinaryKHR;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePipelineBinaryKHR;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
public:
PipelineBinaryKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VkPipelineBinaryKHR pipelineBinary,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr,
VULKAN_HPP_NAMESPACE::Result successCode = VULKAN_HPP_NAMESPACE::Result::eSuccess )
: m_device( device )
, m_pipelineBinary( pipelineBinary )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_constructorSuccessCode( successCode )
, m_dispatcher( device.getDispatcher() )
{
}
PipelineBinaryKHR( std::nullptr_t ) {}
~PipelineBinaryKHR()
{
clear();
}
PipelineBinaryKHR() = delete;
PipelineBinaryKHR( PipelineBinaryKHR const & ) = delete;
PipelineBinaryKHR( PipelineBinaryKHR && rhs ) VULKAN_HPP_NOEXCEPT
: m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) )
, m_pipelineBinary( VULKAN_HPP_NAMESPACE::exchange( rhs.m_pipelineBinary, {} ) )
, m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) )
, m_constructorSuccessCode( VULKAN_HPP_NAMESPACE::exchange( rhs.m_constructorSuccessCode, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
{
}
PipelineBinaryKHR & operator=( PipelineBinaryKHR const & ) = delete;
PipelineBinaryKHR & operator=( PipelineBinaryKHR && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
std::swap( m_device, rhs.m_device );
std::swap( m_pipelineBinary, rhs.m_pipelineBinary );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_constructorSuccessCode, rhs.m_constructorSuccessCode );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
return *this;
}
VULKAN_HPP_NAMESPACE::PipelineBinaryKHR const & operator*() const VULKAN_HPP_NOEXCEPT
{
return m_pipelineBinary;
}
operator VULKAN_HPP_NAMESPACE::PipelineBinaryKHR() const VULKAN_HPP_NOEXCEPT
{
return m_pipelineBinary;
}
void clear() VULKAN_HPP_NOEXCEPT
{
if ( m_pipelineBinary )
{
getDispatcher()->vkDestroyPipelineBinaryKHR( static_cast<VkDevice>( m_device ),
static_cast<VkPipelineBinaryKHR>( m_pipelineBinary ),
reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
}
m_device = nullptr;
m_pipelineBinary = nullptr;
m_allocator = nullptr;
m_constructorSuccessCode = VULKAN_HPP_NAMESPACE::Result::eErrorUnknown;
m_dispatcher = nullptr;
}
VULKAN_HPP_NAMESPACE::PipelineBinaryKHR release()
{
m_device = nullptr;
m_allocator = nullptr;
m_constructorSuccessCode = VULKAN_HPP_NAMESPACE::Result::eErrorUnknown;
m_dispatcher = nullptr;
return VULKAN_HPP_NAMESPACE::exchange( m_pipelineBinary, nullptr );
}
VULKAN_HPP_NAMESPACE::Result getConstructorSuccessCode() const
{
return m_constructorSuccessCode;
}
VULKAN_HPP_NAMESPACE::Device getDevice() const
{
return m_device;
}
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const
{
VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
return m_dispatcher;
}
void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineBinaryKHR & rhs ) VULKAN_HPP_NOEXCEPT
{
std::swap( m_device, rhs.m_device );
std::swap( m_pipelineBinary, rhs.m_pipelineBinary );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_constructorSuccessCode, rhs.m_constructorSuccessCode );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
private:
VULKAN_HPP_NAMESPACE::Device m_device = {};
VULKAN_HPP_NAMESPACE::PipelineBinaryKHR m_pipelineBinary = {};
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
VULKAN_HPP_NAMESPACE::Result m_constructorSuccessCode = VULKAN_HPP_NAMESPACE::Result::eErrorUnknown;
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr;
};
class PipelineBinaryKHRs : public std::vector<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineBinaryKHR>
{
public:
# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
PipelineBinaryKHRs( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VULKAN_HPP_NAMESPACE::PipelineBinaryCreateInfoKHR const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
{
*this = device.createPipelineBinariesKHR( createInfo, allocator );
}
# endif
PipelineBinaryKHRs( std::nullptr_t ) {}
PipelineBinaryKHRs() = delete;
PipelineBinaryKHRs( PipelineBinaryKHRs const & ) = delete;
PipelineBinaryKHRs( PipelineBinaryKHRs && rhs ) = default;
PipelineBinaryKHRs & operator=( PipelineBinaryKHRs const & ) = delete;
PipelineBinaryKHRs & operator=( PipelineBinaryKHRs && rhs ) = default;
private:
PipelineBinaryKHRs( std::vector<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineBinaryKHR> && rhs )
{
std::swap( *this, rhs );
}
};
class PipelineLayout
{
public:
using CType = VkPipelineLayout;
using CppType = VULKAN_HPP_NAMESPACE::PipelineLayout;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePipelineLayout;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipelineLayout;
public:
# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
PipelineLayout( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
{
*this = device.createPipelineLayout( createInfo, allocator );
}
# endif
PipelineLayout( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VkPipelineLayout pipelineLayout,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_device( device )
, m_pipelineLayout( pipelineLayout )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
{
}
PipelineLayout( std::nullptr_t ) {}
~PipelineLayout()
{
clear();
}
PipelineLayout() = delete;
PipelineLayout( PipelineLayout const & ) = delete;
PipelineLayout( PipelineLayout && rhs ) VULKAN_HPP_NOEXCEPT
: m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) )
, m_pipelineLayout( VULKAN_HPP_NAMESPACE::exchange( rhs.m_pipelineLayout, {} ) )
, m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
{
}
PipelineLayout & operator=( PipelineLayout const & ) = delete;
PipelineLayout & operator=( PipelineLayout && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
std::swap( m_device, rhs.m_device );
std::swap( m_pipelineLayout, rhs.m_pipelineLayout );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
return *this;
}
VULKAN_HPP_NAMESPACE::PipelineLayout const & operator*() const VULKAN_HPP_NOEXCEPT
{
return m_pipelineLayout;
}
operator VULKAN_HPP_NAMESPACE::PipelineLayout() const VULKAN_HPP_NOEXCEPT
{
return m_pipelineLayout;
}
void clear() VULKAN_HPP_NOEXCEPT
{
if ( m_pipelineLayout )
{
getDispatcher()->vkDestroyPipelineLayout( static_cast<VkDevice>( m_device ),
static_cast<VkPipelineLayout>( m_pipelineLayout ),
reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
}
m_device = nullptr;
m_pipelineLayout = nullptr;
m_allocator = nullptr;
m_dispatcher = nullptr;
}
VULKAN_HPP_NAMESPACE::PipelineLayout release()
{
m_device = nullptr;
m_allocator = nullptr;
m_dispatcher = nullptr;
return VULKAN_HPP_NAMESPACE::exchange( m_pipelineLayout, nullptr );
}
VULKAN_HPP_NAMESPACE::Device getDevice() const
{
return m_device;
}
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const
{
VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
return m_dispatcher;
}
void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineLayout & rhs ) VULKAN_HPP_NOEXCEPT
{
std::swap( m_device, rhs.m_device );
std::swap( m_pipelineLayout, rhs.m_pipelineLayout );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
private:
VULKAN_HPP_NAMESPACE::Device m_device = {};
VULKAN_HPP_NAMESPACE::PipelineLayout m_pipelineLayout = {};
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr;
};
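// Usage sketch (illustrative comment, not part of the generated API): a pipeline layout with a
// single descriptor set layout and no push constant ranges. Assumes the default namespace
// aliases, an existing `device`, and a vk::DescriptorSetLayout handle `setLayout`.
//
//   vk::PipelineLayoutCreateInfo layoutInfo( {}, setLayout );
//   vk::raii::PipelineLayout pipelineLayout( device, layoutInfo );
//   // vkDestroyPipelineLayout is called automatically when `pipelineLayout` is destroyed.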
class PrivateDataSlot
{
public:
using CType = VkPrivateDataSlot;
using CppType = VULKAN_HPP_NAMESPACE::PrivateDataSlot;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePrivateDataSlot;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
public:
# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
PrivateDataSlot( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
{
*this = device.createPrivateDataSlot( createInfo, allocator );
}
# endif
PrivateDataSlot( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VkPrivateDataSlot privateDataSlot,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_device( device )
, m_privateDataSlot( privateDataSlot )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
{
}
PrivateDataSlot( std::nullptr_t ) {}
~PrivateDataSlot()
{
clear();
}
PrivateDataSlot() = delete;
PrivateDataSlot( PrivateDataSlot const & ) = delete;
PrivateDataSlot( PrivateDataSlot && rhs ) VULKAN_HPP_NOEXCEPT
: m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) )
, m_privateDataSlot( VULKAN_HPP_NAMESPACE::exchange( rhs.m_privateDataSlot, {} ) )
, m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
{
}
PrivateDataSlot & operator=( PrivateDataSlot const & ) = delete;
PrivateDataSlot & operator=( PrivateDataSlot && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
std::swap( m_device, rhs.m_device );
std::swap( m_privateDataSlot, rhs.m_privateDataSlot );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
return *this;
}
VULKAN_HPP_NAMESPACE::PrivateDataSlot const & operator*() const VULKAN_HPP_NOEXCEPT
{
return m_privateDataSlot;
}
operator VULKAN_HPP_NAMESPACE::PrivateDataSlot() const VULKAN_HPP_NOEXCEPT
{
return m_privateDataSlot;
}
void clear() VULKAN_HPP_NOEXCEPT
{
if ( m_privateDataSlot )
{
getDispatcher()->vkDestroyPrivateDataSlot( static_cast<VkDevice>( m_device ),
static_cast<VkPrivateDataSlot>( m_privateDataSlot ),
reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
}
m_device = nullptr;
m_privateDataSlot = nullptr;
m_allocator = nullptr;
m_dispatcher = nullptr;
}
VULKAN_HPP_NAMESPACE::PrivateDataSlot release()
{
m_device = nullptr;
m_allocator = nullptr;
m_dispatcher = nullptr;
return VULKAN_HPP_NAMESPACE::exchange( m_privateDataSlot, nullptr );
}
VULKAN_HPP_NAMESPACE::Device getDevice() const
{
return m_device;
}
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const
{
VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
return m_dispatcher;
}
void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PrivateDataSlot & rhs ) VULKAN_HPP_NOEXCEPT
{
std::swap( m_device, rhs.m_device );
std::swap( m_privateDataSlot, rhs.m_privateDataSlot );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
private:
VULKAN_HPP_NAMESPACE::Device m_device = {};
VULKAN_HPP_NAMESPACE::PrivateDataSlot m_privateDataSlot = {};
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr;
};
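// Usage sketch (illustrative comment, not part of the generated API): attaching a 64-bit value
// to an object through a private data slot. Assumes the default namespace aliases, an existing
// `device`, and a raw uint64_t object handle `imageHandle`; the set/get calls shown live on
// vk::raii::Device, not on the slot itself.
//
//   vk::raii::PrivateDataSlot slot( device, vk::PrivateDataSlotCreateInfo{} );
//   device.setPrivateData( vk::ObjectType::eImage, imageHandle, slot, 42u );
//   uint64_t value = device.getPrivateData( vk::ObjectType::eImage, imageHandle, slot );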
class QueryPool
{
public:
using CType = VkQueryPool;
using CppType = VULKAN_HPP_NAMESPACE::QueryPool;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eQueryPool;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eQueryPool;
public:
# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
QueryPool( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
{
*this = device.createQueryPool( createInfo, allocator );
}
# endif
QueryPool( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VkQueryPool queryPool,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_device( device )
, m_queryPool( queryPool )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
{
}
QueryPool( std::nullptr_t ) {}
~QueryPool()
{
clear();
}
QueryPool() = delete;
QueryPool( QueryPool const & ) = delete;
QueryPool( QueryPool && rhs ) VULKAN_HPP_NOEXCEPT
: m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) )
, m_queryPool( VULKAN_HPP_NAMESPACE::exchange( rhs.m_queryPool, {} ) )
, m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
{
}
QueryPool & operator=( QueryPool const & ) = delete;
QueryPool & operator=( QueryPool && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
std::swap( m_device, rhs.m_device );
std::swap( m_queryPool, rhs.m_queryPool );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
return *this;
}
VULKAN_HPP_NAMESPACE::QueryPool const & operator*() const VULKAN_HPP_NOEXCEPT
{
return m_queryPool;
}
operator VULKAN_HPP_NAMESPACE::QueryPool() const VULKAN_HPP_NOEXCEPT
{
return m_queryPool;
}
void clear() VULKAN_HPP_NOEXCEPT
{
if ( m_queryPool )
{
getDispatcher()->vkDestroyQueryPool(
static_cast<VkDevice>( m_device ), static_cast<VkQueryPool>( m_queryPool ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
}
m_device = nullptr;
m_queryPool = nullptr;
m_allocator = nullptr;
m_dispatcher = nullptr;
}
VULKAN_HPP_NAMESPACE::QueryPool release()
{
m_device = nullptr;
m_allocator = nullptr;
m_dispatcher = nullptr;
return VULKAN_HPP_NAMESPACE::exchange( m_queryPool, nullptr );
}
VULKAN_HPP_NAMESPACE::Device getDevice() const
{
return m_device;
}
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const
{
VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
return m_dispatcher;
}
void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::QueryPool & rhs ) VULKAN_HPP_NOEXCEPT
{
std::swap( m_device, rhs.m_device );
std::swap( m_queryPool, rhs.m_queryPool );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
//=== VK_VERSION_1_0 ===
template <typename DataType>
VULKAN_HPP_NODISCARD std::pair<VULKAN_HPP_NAMESPACE::Result, std::vector<DataType>>
getResults( uint32_t firstQuery,
uint32_t queryCount,
size_t dataSize,
VULKAN_HPP_NAMESPACE::DeviceSize stride,
VULKAN_HPP_NAMESPACE::QueryResultFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const;
template <typename DataType>
VULKAN_HPP_NODISCARD std::pair<VULKAN_HPP_NAMESPACE::Result, DataType>
getResult( uint32_t firstQuery,
uint32_t queryCount,
VULKAN_HPP_NAMESPACE::DeviceSize stride,
VULKAN_HPP_NAMESPACE::QueryResultFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const;
//=== VK_VERSION_1_2 ===
void reset( uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT;
//=== VK_EXT_host_query_reset ===
void resetEXT( uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT;
private:
VULKAN_HPP_NAMESPACE::Device m_device = {};
VULKAN_HPP_NAMESPACE::QueryPool m_queryPool = {};
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr;
};
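// Usage sketch (illustrative comment, not part of the generated API): reading back 64-bit
// occlusion query results through the templated getResults() member declared above. Assumes the
// default namespace aliases, an existing `device`, and queries already written by a submitted
// command buffer.
//
//   vk::raii::QueryPool queryPool( device, vk::QueryPoolCreateInfo( {}, vk::QueryType::eOcclusion, 4 ) );
//   auto [ result, data ] = queryPool.getResults<uint64_t>(
//     0, 4, 4 * sizeof( uint64_t ), sizeof( uint64_t ),
//     vk::QueryResultFlagBits::e64 | vk::QueryResultFlagBits::eWait );
//   // result is vk::Result::eSuccess here; without eWait it may be vk::Result::eNotReady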
class Queue
{
public:
using CType = VkQueue;
using CppType = VULKAN_HPP_NAMESPACE::Queue;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eQueue;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eQueue;
public:
# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
Queue( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, uint32_t queueFamilyIndex, uint32_t queueIndex )
{
*this = device.getQueue( queueFamilyIndex, queueIndex );
}
# endif
# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
Queue( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 const & queueInfo )
{
*this = device.getQueue2( queueInfo );
}
# endif
Queue( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VkQueue queue ) : m_queue( queue ), m_dispatcher( device.getDispatcher() )
{
}
Queue( std::nullptr_t ) {}
~Queue()
{
clear();
}
Queue() = delete;
Queue( Queue const & rhs ) : m_queue( rhs.m_queue ), m_dispatcher( rhs.m_dispatcher ) {}
Queue( Queue && rhs ) VULKAN_HPP_NOEXCEPT
: m_queue( VULKAN_HPP_NAMESPACE::exchange( rhs.m_queue, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
{
}
Queue & operator=( Queue const & rhs )
{
m_queue = rhs.m_queue;
m_dispatcher = rhs.m_dispatcher;
return *this;
}
Queue & operator=( Queue && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
std::swap( m_queue, rhs.m_queue );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
return *this;
}
VULKAN_HPP_NAMESPACE::Queue const & operator*() const VULKAN_HPP_NOEXCEPT
{
return m_queue;
}
operator VULKAN_HPP_NAMESPACE::Queue() const VULKAN_HPP_NOEXCEPT
{
return m_queue;
}
void clear() VULKAN_HPP_NOEXCEPT
{
m_queue = nullptr;
m_dispatcher = nullptr;
}
VULKAN_HPP_NAMESPACE::Queue release()
{
m_dispatcher = nullptr;
return VULKAN_HPP_NAMESPACE::exchange( m_queue, nullptr );
}
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const
{
VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
return m_dispatcher;
}
void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Queue & rhs ) VULKAN_HPP_NOEXCEPT
{
std::swap( m_queue, rhs.m_queue );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
//=== VK_VERSION_1_0 ===
void submit( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo> const & submits,
VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const;
void waitIdle() const;
void bindSparse( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindSparseInfo> const & bindInfo,
VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const;
//=== VK_VERSION_1_3 ===
void submit2( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo2> const & submits,
VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const;
//=== VK_KHR_swapchain ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result presentKHR( const VULKAN_HPP_NAMESPACE::PresentInfoKHR & presentInfo ) const;
//=== VK_EXT_debug_utils ===
void beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo ) const VULKAN_HPP_NOEXCEPT;
void endDebugUtilsLabelEXT() const VULKAN_HPP_NOEXCEPT;
void insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo ) const VULKAN_HPP_NOEXCEPT;
//=== VK_NV_device_diagnostic_checkpoints ===
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::CheckpointDataNV> getCheckpointDataNV() const;
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::CheckpointData2NV> getCheckpointData2NV() const;
//=== VK_INTEL_performance_query ===
void setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration ) const;
//=== VK_KHR_synchronization2 ===
void submit2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo2> const & submits,
VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const;
//=== VK_NV_low_latency2 ===
void notifyOutOfBandNV( const VULKAN_HPP_NAMESPACE::OutOfBandQueueTypeInfoNV & queueTypeInfo ) const VULKAN_HPP_NOEXCEPT;
private:
VULKAN_HPP_NAMESPACE::Queue m_queue = {};
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr;
};
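// Usage sketch (illustrative comment, not part of the generated API): retrieving a queue and
// submitting recorded work. Assumes the default namespace aliases, an existing `device`, a
// recorded vk::CommandBuffer `commandBuffer`, and the `queueFamilyIndex` used at device creation.
//
//   vk::raii::Queue queue = device.getQueue( queueFamilyIndex, 0 );
//   vk::SubmitInfo submitInfo( {}, {}, commandBuffer );
//   queue.submit( submitInfo );  // the fence parameter defaults to a null handle
//   queue.waitIdle();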
class RenderPass
{
public:
using CType = VkRenderPass;
using CppType = VULKAN_HPP_NAMESPACE::RenderPass;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eRenderPass;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eRenderPass;
public:
# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
RenderPass( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VULKAN_HPP_NAMESPACE::RenderPassCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
{
*this = device.createRenderPass( createInfo, allocator );
}
# endif
# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
RenderPass( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
{
*this = device.createRenderPass2( createInfo, allocator );
}
# endif
RenderPass( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VkRenderPass renderPass,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_device( device )
, m_renderPass( renderPass )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
{
}
RenderPass( std::nullptr_t ) {}
~RenderPass()
{
clear();
}
RenderPass() = delete;
RenderPass( RenderPass const & ) = delete;
RenderPass( RenderPass && rhs ) VULKAN_HPP_NOEXCEPT
: m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) )
, m_renderPass( VULKAN_HPP_NAMESPACE::exchange( rhs.m_renderPass, {} ) )
, m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
{
}
RenderPass & operator=( RenderPass const & ) = delete;
RenderPass & operator=( RenderPass && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
std::swap( m_device, rhs.m_device );
std::swap( m_renderPass, rhs.m_renderPass );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
return *this;
}
VULKAN_HPP_NAMESPACE::RenderPass const & operator*() const VULKAN_HPP_NOEXCEPT
{
return m_renderPass;
}
operator VULKAN_HPP_NAMESPACE::RenderPass() const VULKAN_HPP_NOEXCEPT
{
return m_renderPass;
}
void clear() VULKAN_HPP_NOEXCEPT
{
if ( m_renderPass )
{
getDispatcher()->vkDestroyRenderPass(
static_cast<VkDevice>( m_device ), static_cast<VkRenderPass>( m_renderPass ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
}
m_device = nullptr;
m_renderPass = nullptr;
m_allocator = nullptr;
m_dispatcher = nullptr;
}
VULKAN_HPP_NAMESPACE::RenderPass release()
{
m_device = nullptr;
m_allocator = nullptr;
m_dispatcher = nullptr;
return VULKAN_HPP_NAMESPACE::exchange( m_renderPass, nullptr );
}
VULKAN_HPP_NAMESPACE::Device getDevice() const
{
return m_device;
}
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const
{
VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
return m_dispatcher;
}
void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::RenderPass & rhs ) VULKAN_HPP_NOEXCEPT
{
std::swap( m_device, rhs.m_device );
std::swap( m_renderPass, rhs.m_renderPass );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
//=== VK_VERSION_1_0 ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Extent2D getRenderAreaGranularity() const VULKAN_HPP_NOEXCEPT;
//=== VK_HUAWEI_subpass_shading ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Extent2D getSubpassShadingMaxWorkgroupSizeHUAWEI() const;
private:
VULKAN_HPP_NAMESPACE::Device m_device = {};
VULKAN_HPP_NAMESPACE::RenderPass m_renderPass = {};
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr;
};
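// Usage sketch (illustrative comment, not part of the generated API): a minimal render pass with
// one color attachment, using the enhanced-mode structure constructors. Assumes the default
// namespace aliases, an existing `device`, and a swapchain image format `colorFormat`.
//
//   vk::AttachmentDescription color( {}, colorFormat, vk::SampleCountFlagBits::e1,
//                                    vk::AttachmentLoadOp::eClear, vk::AttachmentStoreOp::eStore,
//                                    vk::AttachmentLoadOp::eDontCare, vk::AttachmentStoreOp::eDontCare,
//                                    vk::ImageLayout::eUndefined, vk::ImageLayout::ePresentSrcKHR );
//   vk::AttachmentReference colorRef( 0, vk::ImageLayout::eColorAttachmentOptimal );
//   vk::SubpassDescription subpass( {}, vk::PipelineBindPoint::eGraphics, {}, colorRef );
//   vk::raii::RenderPass renderPass( device, vk::RenderPassCreateInfo( {}, color, subpass ) );
//   vk::Extent2D granularity = renderPass.getRenderAreaGranularity();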
class Sampler
{
public:
using CType = VkSampler;
using CppType = VULKAN_HPP_NAMESPACE::Sampler;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eSampler;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSampler;
public:
# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
Sampler( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VULKAN_HPP_NAMESPACE::SamplerCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
{
*this = device.createSampler( createInfo, allocator );
}
# endif
Sampler( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VkSampler sampler,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_device( device )
, m_sampler( sampler )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
{
}
Sampler( std::nullptr_t ) {}
~Sampler()
{
clear();
}
Sampler() = delete;
Sampler( Sampler const & ) = delete;
Sampler( Sampler && rhs ) VULKAN_HPP_NOEXCEPT
: m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) )
, m_sampler( VULKAN_HPP_NAMESPACE::exchange( rhs.m_sampler, {} ) )
, m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
{
}
Sampler & operator=( Sampler const & ) = delete;
Sampler & operator=( Sampler && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
std::swap( m_device, rhs.m_device );
std::swap( m_sampler, rhs.m_sampler );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
return *this;
}
VULKAN_HPP_NAMESPACE::Sampler const & operator*() const VULKAN_HPP_NOEXCEPT
{
return m_sampler;
}
operator VULKAN_HPP_NAMESPACE::Sampler() const VULKAN_HPP_NOEXCEPT
{
return m_sampler;
}
void clear() VULKAN_HPP_NOEXCEPT
{
if ( m_sampler )
{
getDispatcher()->vkDestroySampler(
static_cast<VkDevice>( m_device ), static_cast<VkSampler>( m_sampler ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
}
m_device = nullptr;
m_sampler = nullptr;
m_allocator = nullptr;
m_dispatcher = nullptr;
}
VULKAN_HPP_NAMESPACE::Sampler release()
{
m_device = nullptr;
m_allocator = nullptr;
m_dispatcher = nullptr;
return VULKAN_HPP_NAMESPACE::exchange( m_sampler, nullptr );
}
VULKAN_HPP_NAMESPACE::Device getDevice() const
{
return m_device;
}
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const
{
VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
return m_dispatcher;
}
void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Sampler & rhs ) VULKAN_HPP_NOEXCEPT
{
std::swap( m_device, rhs.m_device );
std::swap( m_sampler, rhs.m_sampler );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
private:
VULKAN_HPP_NAMESPACE::Device m_device = {};
VULKAN_HPP_NAMESPACE::Sampler m_sampler = {};
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr;
};
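// Usage sketch (illustrative comment, not part of the generated API): a linear-filtering sampler.
// Assumes the default namespace aliases and an existing `device`; only fields that differ from
// the defaults are set.
//
//   vk::SamplerCreateInfo samplerInfo;
//   samplerInfo.magFilter    = vk::Filter::eLinear;
//   samplerInfo.minFilter    = vk::Filter::eLinear;
//   samplerInfo.addressModeU = samplerInfo.addressModeV = vk::SamplerAddressMode::eRepeat;
//   vk::raii::Sampler sampler( device, samplerInfo );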
class SamplerYcbcrConversion
{
public:
using CType = VkSamplerYcbcrConversion;
using CppType = VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eSamplerYcbcrConversion;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSamplerYcbcrConversion;
public:
# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
SamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
{
*this = device.createSamplerYcbcrConversion( createInfo, allocator );
}
# endif
SamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VkSamplerYcbcrConversion ycbcrConversion,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_device( device )
, m_ycbcrConversion( ycbcrConversion )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
{
}
SamplerYcbcrConversion( std::nullptr_t ) {}
~SamplerYcbcrConversion()
{
clear();
}
SamplerYcbcrConversion() = delete;
SamplerYcbcrConversion( SamplerYcbcrConversion const & ) = delete;
SamplerYcbcrConversion( SamplerYcbcrConversion && rhs ) VULKAN_HPP_NOEXCEPT
: m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) )
, m_ycbcrConversion( VULKAN_HPP_NAMESPACE::exchange( rhs.m_ycbcrConversion, {} ) )
, m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
{
}
SamplerYcbcrConversion & operator=( SamplerYcbcrConversion const & ) = delete;
SamplerYcbcrConversion & operator=( SamplerYcbcrConversion && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
std::swap( m_device, rhs.m_device );
std::swap( m_ycbcrConversion, rhs.m_ycbcrConversion );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
return *this;
}
VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion const & operator*() const VULKAN_HPP_NOEXCEPT
{
return m_ycbcrConversion;
}
operator VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion() const VULKAN_HPP_NOEXCEPT
{
return m_ycbcrConversion;
}
void clear() VULKAN_HPP_NOEXCEPT
{
if ( m_ycbcrConversion )
{
getDispatcher()->vkDestroySamplerYcbcrConversion( static_cast<VkDevice>( m_device ),
static_cast<VkSamplerYcbcrConversion>( m_ycbcrConversion ),
reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
}
m_device = nullptr;
m_ycbcrConversion = nullptr;
m_allocator = nullptr;
m_dispatcher = nullptr;
}
VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion release()
{
m_device = nullptr;
m_allocator = nullptr;
m_dispatcher = nullptr;
return VULKAN_HPP_NAMESPACE::exchange( m_ycbcrConversion, nullptr );
}
VULKAN_HPP_NAMESPACE::Device getDevice() const
{
return m_device;
}
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const
{
VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
return m_dispatcher;
}
void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SamplerYcbcrConversion & rhs ) VULKAN_HPP_NOEXCEPT
{
std::swap( m_device, rhs.m_device );
std::swap( m_ycbcrConversion, rhs.m_ycbcrConversion );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
private:
VULKAN_HPP_NAMESPACE::Device m_device = {};
VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion m_ycbcrConversion = {};
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr;
};
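// Usage sketch (illustrative comment, not part of the generated API): creating a Y'CbCr
// conversion for a multi-planar format and chaining it into a sampler. Assumes the default
// namespace aliases, an existing `device`, and that the chosen format supports sampler Y'CbCr
// conversion on the target implementation.
//
//   vk::SamplerYcbcrConversionCreateInfo conversionInfo;
//   conversionInfo.format     = vk::Format::eG8B8R82Plane420Unorm;
//   conversionInfo.ycbcrModel = vk::SamplerYcbcrModelConversion::eYcbcr709;
//   vk::raii::SamplerYcbcrConversion conversion( device, conversionInfo );
//   vk::SamplerYcbcrConversionInfo chainedInfo( conversion );  // chain into vk::SamplerCreateInfo::pNext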
class Semaphore
{
public:
using CType = VkSemaphore;
using CppType = VULKAN_HPP_NAMESPACE::Semaphore;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eSemaphore;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSemaphore;
public:
# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
Semaphore( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
{
*this = device.createSemaphore( createInfo, allocator );
}
# endif
Semaphore( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VkSemaphore semaphore,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_device( device )
, m_semaphore( semaphore )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
{
}
Semaphore( std::nullptr_t ) {}
~Semaphore()
{
clear();
}
Semaphore() = delete;
Semaphore( Semaphore const & ) = delete;
Semaphore( Semaphore && rhs ) VULKAN_HPP_NOEXCEPT
: m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) )
, m_semaphore( VULKAN_HPP_NAMESPACE::exchange( rhs.m_semaphore, {} ) )
, m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
{
}
Semaphore & operator=( Semaphore const & ) = delete;
Semaphore & operator=( Semaphore && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
std::swap( m_device, rhs.m_device );
std::swap( m_semaphore, rhs.m_semaphore );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
return *this;
}
VULKAN_HPP_NAMESPACE::Semaphore const & operator*() const VULKAN_HPP_NOEXCEPT
{
return m_semaphore;
}
operator VULKAN_HPP_NAMESPACE::Semaphore() const VULKAN_HPP_NOEXCEPT
{
return m_semaphore;
}
void clear() VULKAN_HPP_NOEXCEPT
{
if ( m_semaphore )
{
getDispatcher()->vkDestroySemaphore(
static_cast<VkDevice>( m_device ), static_cast<VkSemaphore>( m_semaphore ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
}
m_device = nullptr;
m_semaphore = nullptr;
m_allocator = nullptr;
m_dispatcher = nullptr;
}
VULKAN_HPP_NAMESPACE::Semaphore release()
{
m_device = nullptr;
m_allocator = nullptr;
m_dispatcher = nullptr;
return VULKAN_HPP_NAMESPACE::exchange( m_semaphore, nullptr );
}
VULKAN_HPP_NAMESPACE::Device getDevice() const
{
return m_device;
}
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const
{
VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
return m_dispatcher;
}
void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Semaphore & rhs ) VULKAN_HPP_NOEXCEPT
{
std::swap( m_device, rhs.m_device );
std::swap( m_semaphore, rhs.m_semaphore );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
//=== VK_VERSION_1_2 ===
VULKAN_HPP_NODISCARD uint64_t getCounterValue() const;
//=== VK_KHR_timeline_semaphore ===
VULKAN_HPP_NODISCARD uint64_t getCounterValueKHR() const;
private:
VULKAN_HPP_NAMESPACE::Device m_device = {};
VULKAN_HPP_NAMESPACE::Semaphore m_semaphore = {};
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr;
};
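    // Usage sketch (comment only, not part of the generated API): constructing an owning Semaphore.
    // Assumes the default namespace aliases (vk / vk::raii) and an already created vk::raii::Device
    // named "device"; both names are illustrative. vkDestroySemaphore is called automatically on destruction.
    //
    //   vk::raii::Semaphore semaphore( device, vk::SemaphoreCreateInfo{} );
    //   vk::Semaphore       handle = *semaphore;   // non-owning handle for use in submit/present infos
    //   // getCounterValue() additionally requires a timeline semaphore (VK_VERSION_1_2 or VK_KHR_timeline_semaphore).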
class ShaderEXT
{
public:
using CType = VkShaderEXT;
using CppType = VULKAN_HPP_NAMESPACE::ShaderEXT;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eShaderEXT;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
public:
# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
ShaderEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
{
*this = device.createShaderEXT( createInfo, allocator );
}
# endif
ShaderEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VkShaderEXT shader,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr,
VULKAN_HPP_NAMESPACE::Result successCode = VULKAN_HPP_NAMESPACE::Result::eSuccess )
: m_device( device )
, m_shader( shader )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_constructorSuccessCode( successCode )
, m_dispatcher( device.getDispatcher() )
{
}
ShaderEXT( std::nullptr_t ) {}
~ShaderEXT()
{
clear();
}
ShaderEXT() = delete;
ShaderEXT( ShaderEXT const & ) = delete;
ShaderEXT( ShaderEXT && rhs ) VULKAN_HPP_NOEXCEPT
: m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) )
, m_shader( VULKAN_HPP_NAMESPACE::exchange( rhs.m_shader, {} ) )
, m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) )
, m_constructorSuccessCode( VULKAN_HPP_NAMESPACE::exchange( rhs.m_constructorSuccessCode, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
{
}
ShaderEXT & operator=( ShaderEXT const & ) = delete;
ShaderEXT & operator=( ShaderEXT && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
std::swap( m_device, rhs.m_device );
std::swap( m_shader, rhs.m_shader );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_constructorSuccessCode, rhs.m_constructorSuccessCode );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
return *this;
}
VULKAN_HPP_NAMESPACE::ShaderEXT const & operator*() const VULKAN_HPP_NOEXCEPT
{
return m_shader;
}
operator VULKAN_HPP_NAMESPACE::ShaderEXT() const VULKAN_HPP_NOEXCEPT
{
return m_shader;
}
void clear() VULKAN_HPP_NOEXCEPT
{
if ( m_shader )
{
getDispatcher()->vkDestroyShaderEXT(
static_cast<VkDevice>( m_device ), static_cast<VkShaderEXT>( m_shader ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
}
m_device = nullptr;
m_shader = nullptr;
m_allocator = nullptr;
m_constructorSuccessCode = VULKAN_HPP_NAMESPACE::Result::eErrorUnknown;
m_dispatcher = nullptr;
}
VULKAN_HPP_NAMESPACE::ShaderEXT release()
{
m_device = nullptr;
m_allocator = nullptr;
m_constructorSuccessCode = VULKAN_HPP_NAMESPACE::Result::eErrorUnknown;
m_dispatcher = nullptr;
return VULKAN_HPP_NAMESPACE::exchange( m_shader, nullptr );
}
VULKAN_HPP_NAMESPACE::Result getConstructorSuccessCode() const
{
return m_constructorSuccessCode;
}
VULKAN_HPP_NAMESPACE::Device getDevice() const
{
return m_device;
}
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const
{
VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
return m_dispatcher;
}
void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ShaderEXT & rhs ) VULKAN_HPP_NOEXCEPT
{
std::swap( m_device, rhs.m_device );
std::swap( m_shader, rhs.m_shader );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_constructorSuccessCode, rhs.m_constructorSuccessCode );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
//=== VK_EXT_shader_object ===
VULKAN_HPP_NODISCARD std::vector<uint8_t> getBinaryData() const;
private:
VULKAN_HPP_NAMESPACE::Device m_device = {};
VULKAN_HPP_NAMESPACE::ShaderEXT m_shader = {};
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
VULKAN_HPP_NAMESPACE::Result m_constructorSuccessCode = VULKAN_HPP_NAMESPACE::Result::eErrorUnknown;
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr;
};
class ShaderEXTs : public std::vector<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ShaderEXT>
{
public:
# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
ShaderEXTs( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT> const & createInfos,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
{
*this = device.createShadersEXT( createInfos, allocator );
}
# endif
ShaderEXTs( std::nullptr_t ) {}
ShaderEXTs() = delete;
ShaderEXTs( ShaderEXTs const & ) = delete;
ShaderEXTs( ShaderEXTs && rhs ) = default;
ShaderEXTs & operator=( ShaderEXTs const & ) = delete;
ShaderEXTs & operator=( ShaderEXTs && rhs ) = default;
private:
ShaderEXTs( std::vector<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ShaderEXT> && rhs )
{
std::swap( *this, rhs );
}
};
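    // Usage sketch (comment only, illustrative): ShaderEXTs is a std::vector of owning ShaderEXT wrappers,
    // filled by the batched create call of VK_EXT_shader_object. "device", "vertexInfo" and "fragmentInfo"
    // are assumed to exist, and the extension must be enabled on the device.
    //
    //   std::array<vk::ShaderCreateInfoEXT, 2> infos = { vertexInfo, fragmentInfo };
    //   vk::raii::ShaderEXTs shaders( device, infos );            // or: auto shaders = device.createShadersEXT( infos );
    //   std::vector<uint8_t> binary = shaders[0].getBinaryData(); // vkGetShaderBinaryDataEXT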
class ShaderModule
{
public:
using CType = VkShaderModule;
using CppType = VULKAN_HPP_NAMESPACE::ShaderModule;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eShaderModule;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eShaderModule;
public:
# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
ShaderModule( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
{
*this = device.createShaderModule( createInfo, allocator );
}
# endif
ShaderModule( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VkShaderModule shaderModule,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_device( device )
, m_shaderModule( shaderModule )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
{
}
ShaderModule( std::nullptr_t ) {}
~ShaderModule()
{
clear();
}
ShaderModule() = delete;
ShaderModule( ShaderModule const & ) = delete;
ShaderModule( ShaderModule && rhs ) VULKAN_HPP_NOEXCEPT
: m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) )
, m_shaderModule( VULKAN_HPP_NAMESPACE::exchange( rhs.m_shaderModule, {} ) )
, m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
{
}
ShaderModule & operator=( ShaderModule const & ) = delete;
ShaderModule & operator=( ShaderModule && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
std::swap( m_device, rhs.m_device );
std::swap( m_shaderModule, rhs.m_shaderModule );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
return *this;
}
VULKAN_HPP_NAMESPACE::ShaderModule const & operator*() const VULKAN_HPP_NOEXCEPT
{
return m_shaderModule;
}
operator VULKAN_HPP_NAMESPACE::ShaderModule() const VULKAN_HPP_NOEXCEPT
{
return m_shaderModule;
}
void clear() VULKAN_HPP_NOEXCEPT
{
if ( m_shaderModule )
{
getDispatcher()->vkDestroyShaderModule(
static_cast<VkDevice>( m_device ), static_cast<VkShaderModule>( m_shaderModule ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
}
m_device = nullptr;
m_shaderModule = nullptr;
m_allocator = nullptr;
m_dispatcher = nullptr;
}
VULKAN_HPP_NAMESPACE::ShaderModule release()
{
m_device = nullptr;
m_allocator = nullptr;
m_dispatcher = nullptr;
return VULKAN_HPP_NAMESPACE::exchange( m_shaderModule, nullptr );
}
VULKAN_HPP_NAMESPACE::Device getDevice() const
{
return m_device;
}
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const
{
VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
return m_dispatcher;
}
void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ShaderModule & rhs ) VULKAN_HPP_NOEXCEPT
{
std::swap( m_device, rhs.m_device );
std::swap( m_shaderModule, rhs.m_shaderModule );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
//=== VK_EXT_shader_module_identifier ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT getIdentifierEXT() const VULKAN_HPP_NOEXCEPT;
private:
VULKAN_HPP_NAMESPACE::Device m_device = {};
VULKAN_HPP_NAMESPACE::ShaderModule m_shaderModule = {};
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr;
};
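    // Usage sketch (comment only, illustrative): creating an owning ShaderModule from a SPIR-V blob.
    // "spirv" is an assumed std::vector<uint32_t>; codeSize is given in bytes.
    //
    //   vk::ShaderModuleCreateInfo createInfo( {}, spirv.size() * sizeof( uint32_t ), spirv.data() );
    //   vk::raii::ShaderModule     shaderModule( device, createInfo );
    //   // getIdentifierEXT() additionally requires VK_EXT_shader_module_identifier to be enabled on the device.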
class SurfaceKHR
{
public:
using CType = VkSurfaceKHR;
using CppType = VULKAN_HPP_NAMESPACE::SurfaceKHR;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eSurfaceKHR;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSurfaceKHR;
public:
# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
# if defined( VK_USE_PLATFORM_ANDROID_KHR )
SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance,
VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
{
*this = instance.createAndroidSurfaceKHR( createInfo, allocator );
}
# endif /*VK_USE_PLATFORM_ANDROID_KHR*/
# endif
# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
# if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance,
VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
{
*this = instance.createDirectFBSurfaceEXT( createInfo, allocator );
}
# endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
# endif
# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance,
VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
{
*this = instance.createDisplayPlaneSurfaceKHR( createInfo, allocator );
}
# endif
# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance,
VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
{
*this = instance.createHeadlessSurfaceEXT( createInfo, allocator );
}
# endif
# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
# if defined( VK_USE_PLATFORM_IOS_MVK )
SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance,
VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
{
*this = instance.createIOSSurfaceMVK( createInfo, allocator );
}
# endif /*VK_USE_PLATFORM_IOS_MVK*/
# endif
# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
# if defined( VK_USE_PLATFORM_FUCHSIA )
SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance,
VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
{
*this = instance.createImagePipeSurfaceFUCHSIA( createInfo, allocator );
}
# endif /*VK_USE_PLATFORM_FUCHSIA*/
# endif
# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
# if defined( VK_USE_PLATFORM_MACOS_MVK )
SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance,
VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
{
*this = instance.createMacOSSurfaceMVK( createInfo, allocator );
}
# endif /*VK_USE_PLATFORM_MACOS_MVK*/
# endif
# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
# if defined( VK_USE_PLATFORM_METAL_EXT )
SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance,
VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
{
*this = instance.createMetalSurfaceEXT( createInfo, allocator );
}
# endif /*VK_USE_PLATFORM_METAL_EXT*/
# endif
# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
# if defined( VK_USE_PLATFORM_SCREEN_QNX )
SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance,
VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
{
*this = instance.createScreenSurfaceQNX( createInfo, allocator );
}
# endif /*VK_USE_PLATFORM_SCREEN_QNX*/
# endif
# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
# if defined( VK_USE_PLATFORM_GGP )
SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance,
VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
{
*this = instance.createStreamDescriptorSurfaceGGP( createInfo, allocator );
}
# endif /*VK_USE_PLATFORM_GGP*/
# endif
# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
# if defined( VK_USE_PLATFORM_VI_NN )
SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance,
VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
{
*this = instance.createViSurfaceNN( createInfo, allocator );
}
# endif /*VK_USE_PLATFORM_VI_NN*/
# endif
# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
# if defined( VK_USE_PLATFORM_WAYLAND_KHR )
SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance,
VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
{
*this = instance.createWaylandSurfaceKHR( createInfo, allocator );
}
# endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
# endif
# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
# if defined( VK_USE_PLATFORM_WIN32_KHR )
SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance,
VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
{
*this = instance.createWin32SurfaceKHR( createInfo, allocator );
}
# endif /*VK_USE_PLATFORM_WIN32_KHR*/
# endif
# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
# if defined( VK_USE_PLATFORM_XCB_KHR )
SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance,
VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
{
*this = instance.createXcbSurfaceKHR( createInfo, allocator );
}
# endif /*VK_USE_PLATFORM_XCB_KHR*/
# endif
# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
# if defined( VK_USE_PLATFORM_XLIB_KHR )
SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance,
VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
{
*this = instance.createXlibSurfaceKHR( createInfo, allocator );
}
# endif /*VK_USE_PLATFORM_XLIB_KHR*/
# endif
SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance,
VkSurfaceKHR surface,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_instance( instance )
, m_surface( surface )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( instance.getDispatcher() )
{
}
SurfaceKHR( std::nullptr_t ) {}
~SurfaceKHR()
{
clear();
}
SurfaceKHR() = delete;
SurfaceKHR( SurfaceKHR const & ) = delete;
SurfaceKHR( SurfaceKHR && rhs ) VULKAN_HPP_NOEXCEPT
: m_instance( VULKAN_HPP_NAMESPACE::exchange( rhs.m_instance, {} ) )
, m_surface( VULKAN_HPP_NAMESPACE::exchange( rhs.m_surface, {} ) )
, m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
{
}
SurfaceKHR & operator=( SurfaceKHR const & ) = delete;
SurfaceKHR & operator=( SurfaceKHR && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
std::swap( m_instance, rhs.m_instance );
std::swap( m_surface, rhs.m_surface );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
return *this;
}
VULKAN_HPP_NAMESPACE::SurfaceKHR const & operator*() const VULKAN_HPP_NOEXCEPT
{
return m_surface;
}
operator VULKAN_HPP_NAMESPACE::SurfaceKHR() const VULKAN_HPP_NOEXCEPT
{
return m_surface;
}
void clear() VULKAN_HPP_NOEXCEPT
{
if ( m_surface )
{
getDispatcher()->vkDestroySurfaceKHR(
static_cast<VkInstance>( m_instance ), static_cast<VkSurfaceKHR>( m_surface ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
}
m_instance = nullptr;
m_surface = nullptr;
m_allocator = nullptr;
m_dispatcher = nullptr;
}
VULKAN_HPP_NAMESPACE::SurfaceKHR release()
{
m_instance = nullptr;
m_allocator = nullptr;
m_dispatcher = nullptr;
return VULKAN_HPP_NAMESPACE::exchange( m_surface, nullptr );
}
VULKAN_HPP_NAMESPACE::Instance getInstance() const
{
return m_instance;
}
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::InstanceDispatcher const * getDispatcher() const
{
VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
return m_dispatcher;
}
void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR & rhs ) VULKAN_HPP_NOEXCEPT
{
std::swap( m_instance, rhs.m_instance );
std::swap( m_surface, rhs.m_surface );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
private:
VULKAN_HPP_NAMESPACE::Instance m_instance = {};
VULKAN_HPP_NAMESPACE::SurfaceKHR m_surface = {};
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::InstanceDispatcher const * m_dispatcher = nullptr;
};
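    // Usage sketch (comment only, illustrative): SurfaceKHR is owned by an Instance, and each platform-specific
    // constructor is only compiled when the matching VK_USE_PLATFORM_* macro is defined. A headless surface
    // (VK_EXT_headless_surface, assumed to be enabled on the instance) avoids any window-system dependency:
    //
    //   vk::raii::SurfaceKHR surface( instance, vk::HeadlessSurfaceCreateInfoEXT{} );
    //   // vkDestroySurfaceKHR is called automatically when "surface" goes out of scope.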
class SwapchainKHR
{
public:
using CType = VkSwapchainKHR;
using CppType = VULKAN_HPP_NAMESPACE::SwapchainKHR;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eSwapchainKHR;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSwapchainKHR;
public:
# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
SwapchainKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
{
*this = device.createSwapchainKHR( createInfo, allocator );
}
# endif
SwapchainKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VkSwapchainKHR swapchain,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_device( device )
, m_swapchain( swapchain )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
{
}
SwapchainKHR( std::nullptr_t ) {}
~SwapchainKHR()
{
clear();
}
SwapchainKHR() = delete;
SwapchainKHR( SwapchainKHR const & ) = delete;
SwapchainKHR( SwapchainKHR && rhs ) VULKAN_HPP_NOEXCEPT
: m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) )
, m_swapchain( VULKAN_HPP_NAMESPACE::exchange( rhs.m_swapchain, {} ) )
, m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
{
}
SwapchainKHR & operator=( SwapchainKHR const & ) = delete;
SwapchainKHR & operator=( SwapchainKHR && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
std::swap( m_device, rhs.m_device );
std::swap( m_swapchain, rhs.m_swapchain );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
return *this;
}
VULKAN_HPP_NAMESPACE::SwapchainKHR const & operator*() const VULKAN_HPP_NOEXCEPT
{
return m_swapchain;
}
operator VULKAN_HPP_NAMESPACE::SwapchainKHR() const VULKAN_HPP_NOEXCEPT
{
return m_swapchain;
}
void clear() VULKAN_HPP_NOEXCEPT
{
if ( m_swapchain )
{
getDispatcher()->vkDestroySwapchainKHR(
static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( m_swapchain ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
}
m_device = nullptr;
m_swapchain = nullptr;
m_allocator = nullptr;
m_dispatcher = nullptr;
}
VULKAN_HPP_NAMESPACE::SwapchainKHR release()
{
m_device = nullptr;
m_allocator = nullptr;
m_dispatcher = nullptr;
return VULKAN_HPP_NAMESPACE::exchange( m_swapchain, nullptr );
}
VULKAN_HPP_NAMESPACE::Device getDevice() const
{
return m_device;
}
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const
{
VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
return m_dispatcher;
}
void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SwapchainKHR & rhs ) VULKAN_HPP_NOEXCEPT
{
std::swap( m_device, rhs.m_device );
std::swap( m_swapchain, rhs.m_swapchain );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
//=== VK_KHR_swapchain ===
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::Image> getImages() const;
VULKAN_HPP_NODISCARD std::pair<VULKAN_HPP_NAMESPACE::Result, uint32_t>
acquireNextImage( uint64_t timeout,
VULKAN_HPP_NAMESPACE::Semaphore semaphore VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const;
//=== VK_EXT_display_control ===
VULKAN_HPP_NODISCARD uint64_t getCounterEXT( VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter ) const;
//=== VK_GOOGLE_display_timing ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE getRefreshCycleDurationGOOGLE() const;
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE> getPastPresentationTimingGOOGLE() const;
//=== VK_KHR_shared_presentable_image ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result getStatus() const;
//=== VK_AMD_display_native_hdr ===
void setLocalDimmingAMD( VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable ) const VULKAN_HPP_NOEXCEPT;
//=== VK_KHR_present_wait ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result waitForPresent( uint64_t presentId, uint64_t timeout ) const;
# if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_EXT_full_screen_exclusive ===
void acquireFullScreenExclusiveModeEXT() const;
void releaseFullScreenExclusiveModeEXT() const;
# endif /*VK_USE_PLATFORM_WIN32_KHR*/
//=== VK_NV_low_latency2 ===
void setLatencySleepModeNV( const VULKAN_HPP_NAMESPACE::LatencySleepModeInfoNV & sleepModeInfo ) const;
void latencySleepNV( const VULKAN_HPP_NAMESPACE::LatencySleepInfoNV & sleepInfo ) const VULKAN_HPP_NOEXCEPT;
void setLatencyMarkerNV( const VULKAN_HPP_NAMESPACE::SetLatencyMarkerInfoNV & latencyMarkerInfo ) const VULKAN_HPP_NOEXCEPT;
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::LatencyTimingsFrameReportNV> getLatencyTimingsNV() const;
private:
VULKAN_HPP_NAMESPACE::Device m_device = {};
VULKAN_HPP_NAMESPACE::SwapchainKHR m_swapchain = {};
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr;
};
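    // Usage sketch (comment only, illustrative; "surface", "extent", the chosen format and the semaphore are
    // assumptions, and VK_KHR_swapchain must be enabled on the device):
    //
    //   vk::SwapchainCreateInfoKHR createInfo( {}, *surface, 3, vk::Format::eB8G8R8A8Unorm, vk::ColorSpaceKHR::eSrgbNonlinear,
    //                                          extent, 1, vk::ImageUsageFlagBits::eColorAttachment );
    //   vk::raii::SwapchainKHR     swapchain( device, createInfo );
    //   std::vector<vk::Image>     images = swapchain.getImages();
    //   auto [ result, imageIndex ]       = swapchain.acquireNextImage( UINT64_MAX, *imageAvailableSemaphore );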
class SwapchainKHRs : public std::vector<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SwapchainKHR>
{
public:
# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
SwapchainKHRs( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
{
*this = device.createSharedSwapchainsKHR( createInfos, allocator );
}
# endif
SwapchainKHRs( std::nullptr_t ) {}
SwapchainKHRs() = delete;
SwapchainKHRs( SwapchainKHRs const & ) = delete;
SwapchainKHRs( SwapchainKHRs && rhs ) = default;
SwapchainKHRs & operator=( SwapchainKHRs const & ) = delete;
SwapchainKHRs & operator=( SwapchainKHRs && rhs ) = default;
private:
SwapchainKHRs( std::vector<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SwapchainKHR> && rhs )
{
std::swap( *this, rhs );
}
};
class ValidationCacheEXT
{
public:
using CType = VkValidationCacheEXT;
using CppType = VULKAN_HPP_NAMESPACE::ValidationCacheEXT;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eValidationCacheEXT;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eValidationCacheEXT;
public:
# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
ValidationCacheEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
{
*this = device.createValidationCacheEXT( createInfo, allocator );
}
# endif
ValidationCacheEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VkValidationCacheEXT validationCache,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_device( device )
, m_validationCache( validationCache )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
{
}
ValidationCacheEXT( std::nullptr_t ) {}
~ValidationCacheEXT()
{
clear();
}
ValidationCacheEXT() = delete;
ValidationCacheEXT( ValidationCacheEXT const & ) = delete;
ValidationCacheEXT( ValidationCacheEXT && rhs ) VULKAN_HPP_NOEXCEPT
: m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) )
, m_validationCache( VULKAN_HPP_NAMESPACE::exchange( rhs.m_validationCache, {} ) )
, m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
{
}
ValidationCacheEXT & operator=( ValidationCacheEXT const & ) = delete;
ValidationCacheEXT & operator=( ValidationCacheEXT && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
std::swap( m_device, rhs.m_device );
std::swap( m_validationCache, rhs.m_validationCache );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
return *this;
}
VULKAN_HPP_NAMESPACE::ValidationCacheEXT const & operator*() const VULKAN_HPP_NOEXCEPT
{
return m_validationCache;
}
operator VULKAN_HPP_NAMESPACE::ValidationCacheEXT() const VULKAN_HPP_NOEXCEPT
{
return m_validationCache;
}
void clear() VULKAN_HPP_NOEXCEPT
{
if ( m_validationCache )
{
getDispatcher()->vkDestroyValidationCacheEXT( static_cast<VkDevice>( m_device ),
static_cast<VkValidationCacheEXT>( m_validationCache ),
reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
}
m_device = nullptr;
m_validationCache = nullptr;
m_allocator = nullptr;
m_dispatcher = nullptr;
}
VULKAN_HPP_NAMESPACE::ValidationCacheEXT release()
{
m_device = nullptr;
m_allocator = nullptr;
m_dispatcher = nullptr;
return VULKAN_HPP_NAMESPACE::exchange( m_validationCache, nullptr );
}
VULKAN_HPP_NAMESPACE::Device getDevice() const
{
return m_device;
}
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const
{
VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
return m_dispatcher;
}
void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ValidationCacheEXT & rhs ) VULKAN_HPP_NOEXCEPT
{
std::swap( m_device, rhs.m_device );
std::swap( m_validationCache, rhs.m_validationCache );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
//=== VK_EXT_validation_cache ===
void merge( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ValidationCacheEXT> const & srcCaches ) const;
VULKAN_HPP_NODISCARD std::vector<uint8_t> getData() const;
private:
VULKAN_HPP_NAMESPACE::Device m_device = {};
VULKAN_HPP_NAMESPACE::ValidationCacheEXT m_validationCache = {};
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr;
};
class VideoSessionKHR
{
public:
using CType = VkVideoSessionKHR;
using CppType = VULKAN_HPP_NAMESPACE::VideoSessionKHR;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eVideoSessionKHR;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
public:
# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
VideoSessionKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
{
*this = device.createVideoSessionKHR( createInfo, allocator );
}
# endif
VideoSessionKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VkVideoSessionKHR videoSession,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_device( device )
, m_videoSession( videoSession )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
{
}
VideoSessionKHR( std::nullptr_t ) {}
~VideoSessionKHR()
{
clear();
}
VideoSessionKHR() = delete;
VideoSessionKHR( VideoSessionKHR const & ) = delete;
VideoSessionKHR( VideoSessionKHR && rhs ) VULKAN_HPP_NOEXCEPT
: m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) )
, m_videoSession( VULKAN_HPP_NAMESPACE::exchange( rhs.m_videoSession, {} ) )
, m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
{
}
VideoSessionKHR & operator=( VideoSessionKHR const & ) = delete;
VideoSessionKHR & operator=( VideoSessionKHR && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
std::swap( m_device, rhs.m_device );
std::swap( m_videoSession, rhs.m_videoSession );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
return *this;
}
VULKAN_HPP_NAMESPACE::VideoSessionKHR const & operator*() const VULKAN_HPP_NOEXCEPT
{
return m_videoSession;
}
operator VULKAN_HPP_NAMESPACE::VideoSessionKHR() const VULKAN_HPP_NOEXCEPT
{
return m_videoSession;
}
void clear() VULKAN_HPP_NOEXCEPT
{
if ( m_videoSession )
{
getDispatcher()->vkDestroyVideoSessionKHR( static_cast<VkDevice>( m_device ),
static_cast<VkVideoSessionKHR>( m_videoSession ),
reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
}
m_device = nullptr;
m_videoSession = nullptr;
m_allocator = nullptr;
m_dispatcher = nullptr;
}
VULKAN_HPP_NAMESPACE::VideoSessionKHR release()
{
m_device = nullptr;
m_allocator = nullptr;
m_dispatcher = nullptr;
return VULKAN_HPP_NAMESPACE::exchange( m_videoSession, nullptr );
}
VULKAN_HPP_NAMESPACE::Device getDevice() const
{
return m_device;
}
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const
{
VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
return m_dispatcher;
}
void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::VideoSessionKHR & rhs ) VULKAN_HPP_NOEXCEPT
{
std::swap( m_device, rhs.m_device );
std::swap( m_videoSession, rhs.m_videoSession );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
//=== VK_KHR_video_queue ===
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR> getMemoryRequirements() const;
void bindMemory( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindVideoSessionMemoryInfoKHR> const & bindSessionMemoryInfos ) const;
private:
VULKAN_HPP_NAMESPACE::Device m_device = {};
VULKAN_HPP_NAMESPACE::VideoSessionKHR m_videoSession = {};
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr;
};
class VideoSessionParametersKHR
{
public:
using CType = VkVideoSessionParametersKHR;
using CppType = VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eVideoSessionParametersKHR;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
public:
# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
VideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
{
*this = device.createVideoSessionParametersKHR( createInfo, allocator );
}
# endif
VideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VkVideoSessionParametersKHR videoSessionParameters,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_device( device )
, m_videoSessionParameters( videoSessionParameters )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
{
}
VideoSessionParametersKHR( std::nullptr_t ) {}
~VideoSessionParametersKHR()
{
clear();
}
VideoSessionParametersKHR() = delete;
VideoSessionParametersKHR( VideoSessionParametersKHR const & ) = delete;
VideoSessionParametersKHR( VideoSessionParametersKHR && rhs ) VULKAN_HPP_NOEXCEPT
: m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) )
, m_videoSessionParameters( VULKAN_HPP_NAMESPACE::exchange( rhs.m_videoSessionParameters, {} ) )
, m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
{
}
VideoSessionParametersKHR & operator=( VideoSessionParametersKHR const & ) = delete;
VideoSessionParametersKHR & operator=( VideoSessionParametersKHR && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
std::swap( m_device, rhs.m_device );
std::swap( m_videoSessionParameters, rhs.m_videoSessionParameters );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
return *this;
}
VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR const & operator*() const VULKAN_HPP_NOEXCEPT
{
return m_videoSessionParameters;
}
operator VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR() const VULKAN_HPP_NOEXCEPT
{
return m_videoSessionParameters;
}
void clear() VULKAN_HPP_NOEXCEPT
{
if ( m_videoSessionParameters )
{
getDispatcher()->vkDestroyVideoSessionParametersKHR( static_cast<VkDevice>( m_device ),
static_cast<VkVideoSessionParametersKHR>( m_videoSessionParameters ),
reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
}
m_device = nullptr;
m_videoSessionParameters = nullptr;
m_allocator = nullptr;
m_dispatcher = nullptr;
}
VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR release()
{
m_device = nullptr;
m_allocator = nullptr;
m_dispatcher = nullptr;
return VULKAN_HPP_NAMESPACE::exchange( m_videoSessionParameters, nullptr );
}
VULKAN_HPP_NAMESPACE::Device getDevice() const
{
return m_device;
}
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const
{
VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
return m_dispatcher;
}
void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::VideoSessionParametersKHR & rhs ) VULKAN_HPP_NOEXCEPT
{
std::swap( m_device, rhs.m_device );
std::swap( m_videoSessionParameters, rhs.m_videoSessionParameters );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
//=== VK_KHR_video_queue ===
void update( const VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR & updateInfo ) const;
private:
VULKAN_HPP_NAMESPACE::Device m_device = {};
VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR m_videoSessionParameters = {};
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr;
};
//===========================
//=== COMMAND Definitions ===
//===========================
//=== VK_VERSION_1_0 ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance>::Type
Context::createInstance( VULKAN_HPP_NAMESPACE::InstanceCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::Instance instance;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateInstance(
reinterpret_cast<const VkInstanceCreateInfo *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkInstance *>( &instance ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
return VULKAN_HPP_UNEXPECTED( result );
# else
VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Context::createInstance" );
# endif
}
return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance( *this, *reinterpret_cast<VkInstance *>( &instance ), allocator );
}
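    // Usage sketch (comment only, illustrative): the Context is the RAII entry point; it loads vkGetInstanceProcAddr
    // and the global functions, and createInstance returns an owning Instance (or a VULKAN_HPP_EXPECTED when
    // VULKAN_HPP_RAII_NO_EXCEPTIONS is defined). Names below assume the default vk / vk::raii namespaces.
    //
    //   vk::raii::Context   context;
    //   vk::ApplicationInfo appInfo( "app", 1, "engine", 1, VK_API_VERSION_1_1 );
    //   vk::raii::Instance  instance = context.createInstance( vk::InstanceCreateInfo( {}, &appInfo ) );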
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<
std::vector<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice>>::Type
Instance::enumeratePhysicalDevices() const
{
std::vector<VULKAN_HPP_NAMESPACE::PhysicalDevice> physicalDevices;
uint32_t physicalDeviceCount;
VULKAN_HPP_NAMESPACE::Result result;
do
{
result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkEnumeratePhysicalDevices( static_cast<VkInstance>( m_instance ), &physicalDeviceCount, nullptr ) );
if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && physicalDeviceCount )
{
physicalDevices.resize( physicalDeviceCount );
result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkEnumeratePhysicalDevices(
static_cast<VkInstance>( m_instance ), &physicalDeviceCount, reinterpret_cast<VkPhysicalDevice *>( physicalDevices.data() ) ) );
}
} while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
if ( ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) && ( result != VULKAN_HPP_NAMESPACE::Result::eIncomplete ) )
{
# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
return VULKAN_HPP_UNEXPECTED( result );
# else
VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Instance::enumeratePhysicalDevices" );
# endif
}
std::vector<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice> physicalDevicesRAII;
physicalDevicesRAII.reserve( physicalDevices.size() );
for ( auto & physicalDevice : physicalDevices )
{
physicalDevicesRAII.emplace_back( *this, *reinterpret_cast<VkPhysicalDevice *>( &physicalDevice ) );
}
return physicalDevicesRAII;
}
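    // Usage sketch (comment only, illustrative): the two-call enumeration pattern is handled internally; the
    // returned PhysicalDevice wrappers are non-owning in Vulkan terms, so there is nothing to destroy.
    //
    //   auto physicalDevices = instance.enumeratePhysicalDevices();
    //   vk::PhysicalDeviceProperties properties = physicalDevices.front().getProperties();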
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures PhysicalDevice::getFeatures() const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceFeatures && "Function <vkGetPhysicalDeviceFeatures> requires <VK_VERSION_1_0>" );
VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features;
getDispatcher()->vkGetPhysicalDeviceFeatures( static_cast<VkPhysicalDevice>( m_physicalDevice ),
reinterpret_cast<VkPhysicalDeviceFeatures *>( &features ) );
return features;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties
PhysicalDevice::getFormatProperties( VULKAN_HPP_NAMESPACE::Format format ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceFormatProperties && "Function <vkGetPhysicalDeviceFormatProperties> requires <VK_VERSION_1_0>" );
VULKAN_HPP_NAMESPACE::FormatProperties formatProperties;
getDispatcher()->vkGetPhysicalDeviceFormatProperties(
static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties *>( &formatProperties ) );
return formatProperties;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ImageFormatProperties
PhysicalDevice::getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format,
VULKAN_HPP_NAMESPACE::ImageType type,
VULKAN_HPP_NAMESPACE::ImageTiling tiling,
VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,
VULKAN_HPP_NAMESPACE::ImageCreateFlags flags ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceImageFormatProperties &&
"Function <vkGetPhysicalDeviceImageFormatProperties> requires <VK_VERSION_1_0>" );
VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkGetPhysicalDeviceImageFormatProperties( static_cast<VkPhysicalDevice>( m_physicalDevice ),
static_cast<VkFormat>( format ),
static_cast<VkImageType>( type ),
static_cast<VkImageTiling>( tiling ),
static_cast<VkImageUsageFlags>( usage ),
static_cast<VkImageCreateFlags>( flags ),
reinterpret_cast<VkImageFormatProperties *>( &imageFormatProperties ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties" );
return imageFormatProperties;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties PhysicalDevice::getProperties() const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceProperties && "Function <vkGetPhysicalDeviceProperties> requires <VK_VERSION_1_0>" );
VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties;
getDispatcher()->vkGetPhysicalDeviceProperties( static_cast<VkPhysicalDevice>( m_physicalDevice ),
reinterpret_cast<VkPhysicalDeviceProperties *>( &properties ) );
return properties;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties> PhysicalDevice::getQueueFamilyProperties() const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties &&
"Function <vkGetPhysicalDeviceQueueFamilyProperties> requires <VK_VERSION_1_0>" );
std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties> queueFamilyProperties;
uint32_t queueFamilyPropertyCount;
getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties( static_cast<VkPhysicalDevice>( m_physicalDevice ), &queueFamilyPropertyCount, nullptr );
queueFamilyProperties.resize( queueFamilyPropertyCount );
getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties( static_cast<VkPhysicalDevice>( m_physicalDevice ),
&queueFamilyPropertyCount,
reinterpret_cast<VkQueueFamilyProperties *>( queueFamilyProperties.data() ) );
VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
{
queueFamilyProperties.resize( queueFamilyPropertyCount );
}
return queueFamilyProperties;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties PhysicalDevice::getMemoryProperties() const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceMemoryProperties && "Function <vkGetPhysicalDeviceMemoryProperties> requires <VK_VERSION_1_0>" );
VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties memoryProperties;
getDispatcher()->vkGetPhysicalDeviceMemoryProperties( static_cast<VkPhysicalDevice>( m_physicalDevice ),
reinterpret_cast<VkPhysicalDeviceMemoryProperties *>( &memoryProperties ) );
return memoryProperties;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE PFN_vkVoidFunction Instance::getProcAddr( const std::string & name ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetInstanceProcAddr && "Function <vkGetInstanceProcAddr> requires <VK_VERSION_1_0>" );
PFN_vkVoidFunction result = getDispatcher()->vkGetInstanceProcAddr( static_cast<VkInstance>( m_instance ), name.c_str() );
return result;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE PFN_vkVoidFunction Device::getProcAddr( const std::string & name ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceProcAddr && "Function <vkGetDeviceProcAddr> requires <VK_VERSION_1_0>" );
PFN_vkVoidFunction result = getDispatcher()->vkGetDeviceProcAddr( static_cast<VkDevice>( m_device ), name.c_str() );
return result;
}
VULKAN_HPP_NODISCARD
VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device>::Type
PhysicalDevice::createDevice( VULKAN_HPP_NAMESPACE::DeviceCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::Device device;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateDevice(
static_cast<VkPhysicalDevice>( m_physicalDevice ),
reinterpret_cast<const VkDeviceCreateInfo *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkDevice *>( &device ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
return VULKAN_HPP_UNEXPECTED( result );
# else
VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "PhysicalDevice::createDevice" );
# endif
}
return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device( *this, *reinterpret_cast<VkDevice *>( &device ), allocator );
}
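    // Usage sketch (comment only, illustrative; "queueFamilyIndex" is assumed to have been selected from
    // getQueueFamilyProperties()):
    //
    //   float                     priority = 1.0f;
    //   vk::DeviceQueueCreateInfo queueInfo( {}, queueFamilyIndex, 1, &priority );
    //   vk::DeviceCreateInfo      deviceInfo( {}, 1, &queueInfo );
    //   vk::raii::Device          device = physicalDevice.createDevice( deviceInfo );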
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties>
Context::enumerateInstanceExtensionProperties( Optional<const std::string> layerName ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkEnumerateInstanceExtensionProperties &&
"Function <vkEnumerateInstanceExtensionProperties> requires <VK_VERSION_1_0>" );
std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties> properties;
uint32_t propertyCount;
VULKAN_HPP_NAMESPACE::Result result;
do
{
result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) );
if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount )
{
properties.resize( propertyCount );
result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkEnumerateInstanceExtensionProperties(
layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast<VkExtensionProperties *>( properties.data() ) ) );
}
} while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Context::enumerateInstanceExtensionProperties" );
VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
if ( propertyCount < properties.size() )
{
properties.resize( propertyCount );
}
return properties;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties>
PhysicalDevice::enumerateDeviceExtensionProperties( Optional<const std::string> layerName ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkEnumerateDeviceExtensionProperties && "Function <vkEnumerateDeviceExtensionProperties> requires <VK_VERSION_1_0>" );
std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties> properties;
uint32_t propertyCount;
VULKAN_HPP_NAMESPACE::Result result;
do
{
result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkEnumerateDeviceExtensionProperties(
static_cast<VkPhysicalDevice>( m_physicalDevice ), layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) );
if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount )
{
properties.resize( propertyCount );
result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkEnumerateDeviceExtensionProperties( static_cast<VkPhysicalDevice>( m_physicalDevice ),
layerName ? layerName->c_str() : nullptr,
&propertyCount,
reinterpret_cast<VkExtensionProperties *>( properties.data() ) ) );
}
} while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceExtensionProperties" );
VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
if ( propertyCount < properties.size() )
{
properties.resize( propertyCount );
}
return properties;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::LayerProperties> Context::enumerateInstanceLayerProperties() const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkEnumerateInstanceLayerProperties && "Function <vkEnumerateInstanceLayerProperties> requires <VK_VERSION_1_0>" );
std::vector<VULKAN_HPP_NAMESPACE::LayerProperties> properties;
uint32_t propertyCount;
VULKAN_HPP_NAMESPACE::Result result;
do
{
result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkEnumerateInstanceLayerProperties( &propertyCount, nullptr ) );
if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount )
{
properties.resize( propertyCount );
result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkEnumerateInstanceLayerProperties( &propertyCount, reinterpret_cast<VkLayerProperties *>( properties.data() ) ) );
}
} while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Context::enumerateInstanceLayerProperties" );
VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
if ( propertyCount < properties.size() )
{
properties.resize( propertyCount );
}
return properties;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::LayerProperties> PhysicalDevice::enumerateDeviceLayerProperties() const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkEnumerateDeviceLayerProperties && "Function <vkEnumerateDeviceLayerProperties> requires <VK_VERSION_1_0>" );
std::vector<VULKAN_HPP_NAMESPACE::LayerProperties> properties;
uint32_t propertyCount;
VULKAN_HPP_NAMESPACE::Result result;
do
{
result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkEnumerateDeviceLayerProperties( static_cast<VkPhysicalDevice>( m_physicalDevice ), &propertyCount, nullptr ) );
if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount )
{
properties.resize( propertyCount );
result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkEnumerateDeviceLayerProperties(
static_cast<VkPhysicalDevice>( m_physicalDevice ), &propertyCount, reinterpret_cast<VkLayerProperties *>( properties.data() ) ) );
}
} while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceLayerProperties" );
VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
if ( propertyCount < properties.size() )
{
properties.resize( propertyCount );
}
return properties;
}
VULKAN_HPP_NODISCARD
VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Queue>::Type
Device::getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::Queue queue;
getDispatcher()->vkGetDeviceQueue( static_cast<VkDevice>( m_device ), queueFamilyIndex, queueIndex, reinterpret_cast<VkQueue *>( &queue ) );
return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Queue( *this, *reinterpret_cast<VkQueue *>( &queue ) );
}
VULKAN_HPP_INLINE void Queue::submit( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo> const & submits,
VULKAN_HPP_NAMESPACE::Fence fence ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkQueueSubmit && "Function <vkQueueSubmit> requires <VK_VERSION_1_0>" );
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkQueueSubmit(
static_cast<VkQueue>( m_queue ), submits.size(), reinterpret_cast<const VkSubmitInfo *>( submits.data() ), static_cast<VkFence>( fence ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::submit" );
}
VULKAN_HPP_INLINE void Queue::waitIdle() const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkQueueWaitIdle && "Function <vkQueueWaitIdle> requires <VK_VERSION_1_0>" );
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkQueueWaitIdle( static_cast<VkQueue>( m_queue ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::waitIdle" );
}
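    // Usage sketch (comment only, illustrative): Queue wrappers are retrieved rather than created, so they do not
    // destroy anything; submit() and waitIdle() check the VkResult and raise an error on failure instead of
    // returning it. "queueFamilyIndex" and "submitInfo" are assumptions.
    //
    //   vk::raii::Queue queue = device.getQueue( queueFamilyIndex, 0 );
    //   queue.submit( submitInfo );   // the fence argument defaults to a null handle
    //   queue.waitIdle();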
VULKAN_HPP_INLINE void Device::waitIdle() const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkDeviceWaitIdle && "Function <vkDeviceWaitIdle> requires <VK_VERSION_1_0>" );
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkDeviceWaitIdle( static_cast<VkDevice>( m_device ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::waitIdle" );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceMemory>::Type
Device::allocateMemory( VULKAN_HPP_NAMESPACE::MemoryAllocateInfo const & allocateInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::DeviceMemory memory;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkAllocateMemory(
static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkMemoryAllocateInfo *>( &allocateInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkDeviceMemory *>( &memory ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
return VULKAN_HPP_UNEXPECTED( result );
# else
VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::allocateMemory" );
# endif
}
return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceMemory( *this, *reinterpret_cast<VkDeviceMemory *>( &memory ), allocator );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE void * DeviceMemory::mapMemory( VULKAN_HPP_NAMESPACE::DeviceSize offset,
VULKAN_HPP_NAMESPACE::DeviceSize size,
VULKAN_HPP_NAMESPACE::MemoryMapFlags flags ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkMapMemory && "Function <vkMapMemory> requires <VK_VERSION_1_0>" );
void * pData;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkMapMemory( static_cast<VkDevice>( m_device ),
static_cast<VkDeviceMemory>( m_memory ),
static_cast<VkDeviceSize>( offset ),
static_cast<VkDeviceSize>( size ),
static_cast<VkMemoryMapFlags>( flags ),
&pData ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::DeviceMemory::mapMemory" );
return pData;
}
VULKAN_HPP_INLINE void DeviceMemory::unmapMemory() const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkUnmapMemory && "Function <vkUnmapMemory> requires <VK_VERSION_1_0>" );
getDispatcher()->vkUnmapMemory( static_cast<VkDevice>( m_device ), static_cast<VkDeviceMemory>( m_memory ) );
}
VULKAN_HPP_INLINE void
Device::flushMappedMemoryRanges( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> const & memoryRanges ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkFlushMappedMemoryRanges && "Function <vkFlushMappedMemoryRanges> requires <VK_VERSION_1_0>" );
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkFlushMappedMemoryRanges(
static_cast<VkDevice>( m_device ), memoryRanges.size(), reinterpret_cast<const VkMappedMemoryRange *>( memoryRanges.data() ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::flushMappedMemoryRanges" );
}
VULKAN_HPP_INLINE void
Device::invalidateMappedMemoryRanges( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> const & memoryRanges ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkInvalidateMappedMemoryRanges && "Function <vkInvalidateMappedMemoryRanges> requires <VK_VERSION_1_0>" );
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkInvalidateMappedMemoryRanges(
static_cast<VkDevice>( m_device ), memoryRanges.size(), reinterpret_cast<const VkMappedMemoryRange *>( memoryRanges.data() ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::invalidateMappedMemoryRanges" );
}
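    // Usage sketch (illustrative comment, not generated code): allocating, mapping, flushing and unmapping host-visible
    // memory. `device`, `allocationSize`, `memoryTypeIndex`, `srcData` and `srcSize` are placeholders assumed to be set
    // up elsewhere; the flush is only required for non-coherent memory types.
    //
    //   vk::raii::DeviceMemory memory = device.allocateMemory( vk::MemoryAllocateInfo( allocationSize, memoryTypeIndex ) );
    //   void * mapped = memory.mapMemory( 0, VK_WHOLE_SIZE );
    //   std::memcpy( mapped, srcData, srcSize );
    //   device.flushMappedMemoryRanges( vk::MappedMemoryRange( *memory, 0, VK_WHOLE_SIZE ) );
    //   memory.unmapMemory();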
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceSize DeviceMemory::getCommitment() const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceMemoryCommitment && "Function <vkGetDeviceMemoryCommitment> requires <VK_VERSION_1_0>" );
VULKAN_HPP_NAMESPACE::DeviceSize committedMemoryInBytes;
getDispatcher()->vkGetDeviceMemoryCommitment(
static_cast<VkDevice>( m_device ), static_cast<VkDeviceMemory>( m_memory ), reinterpret_cast<VkDeviceSize *>( &committedMemoryInBytes ) );
return committedMemoryInBytes;
}
VULKAN_HPP_INLINE void Buffer::bindMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkBindBufferMemory && "Function <vkBindBufferMemory> requires <VK_VERSION_1_0>" );
VULKAN_HPP_NAMESPACE::Result result =
static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkBindBufferMemory( static_cast<VkDevice>( m_device ),
static_cast<VkBuffer>( m_buffer ),
static_cast<VkDeviceMemory>( memory ),
static_cast<VkDeviceSize>( memoryOffset ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Buffer::bindMemory" );
}
VULKAN_HPP_INLINE void Image::bindMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkBindImageMemory && "Function <vkBindImageMemory> requires <VK_VERSION_1_0>" );
VULKAN_HPP_NAMESPACE::Result result =
static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkBindImageMemory( static_cast<VkDevice>( m_device ),
static_cast<VkImage>( m_image ),
static_cast<VkDeviceMemory>( memory ),
static_cast<VkDeviceSize>( memoryOffset ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Image::bindMemory" );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements Buffer::getMemoryRequirements() const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetBufferMemoryRequirements && "Function <vkGetBufferMemoryRequirements> requires <VK_VERSION_1_0>" );
VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements;
getDispatcher()->vkGetBufferMemoryRequirements(
static_cast<VkDevice>( m_device ), static_cast<VkBuffer>( m_buffer ), reinterpret_cast<VkMemoryRequirements *>( &memoryRequirements ) );
return memoryRequirements;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements Image::getMemoryRequirements() const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageMemoryRequirements && "Function <vkGetImageMemoryRequirements> requires <VK_VERSION_1_0>" );
VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements;
getDispatcher()->vkGetImageMemoryRequirements(
static_cast<VkDevice>( m_device ), static_cast<VkImage>( m_image ), reinterpret_cast<VkMemoryRequirements *>( &memoryRequirements ) );
return memoryRequirements;
}
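    // Usage sketch (illustrative comment, not generated code): querying memory requirements and binding memory to an
    // image. `device` and `image` are assumed to be valid RAII handles; `findMemoryTypeIndex` is a purely hypothetical
    // helper that picks a memory type from VkPhysicalDeviceMemoryProperties.
    //
    //   vk::MemoryRequirements requirements = image.getMemoryRequirements();
    //   uint32_t memoryTypeIndex = findMemoryTypeIndex( requirements.memoryTypeBits, vk::MemoryPropertyFlagBits::eDeviceLocal );
    //   vk::raii::DeviceMemory imageMemory = device.allocateMemory( vk::MemoryAllocateInfo( requirements.size, memoryTypeIndex ) );
    //   image.bindMemory( *imageMemory, 0 );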
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements> Image::getSparseMemoryRequirements() const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageSparseMemoryRequirements && "Function <vkGetImageSparseMemoryRequirements> requires <VK_VERSION_1_0>" );
std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements> sparseMemoryRequirements;
uint32_t sparseMemoryRequirementCount;
getDispatcher()->vkGetImageSparseMemoryRequirements(
static_cast<VkDevice>( m_device ), static_cast<VkImage>( m_image ), &sparseMemoryRequirementCount, nullptr );
sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
getDispatcher()->vkGetImageSparseMemoryRequirements( static_cast<VkDevice>( m_device ),
static_cast<VkImage>( m_image ),
&sparseMemoryRequirementCount,
reinterpret_cast<VkSparseImageMemoryRequirements *>( sparseMemoryRequirements.data() ) );
VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
{
sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
}
return sparseMemoryRequirements;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties>
PhysicalDevice::getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format,
VULKAN_HPP_NAMESPACE::ImageType type,
VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,
VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,
VULKAN_HPP_NAMESPACE::ImageTiling tiling ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSparseImageFormatProperties &&
"Function <vkGetPhysicalDeviceSparseImageFormatProperties> requires <VK_VERSION_1_0>" );
std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties> properties;
uint32_t propertyCount;
getDispatcher()->vkGetPhysicalDeviceSparseImageFormatProperties( static_cast<VkPhysicalDevice>( m_physicalDevice ),
static_cast<VkFormat>( format ),
static_cast<VkImageType>( type ),
static_cast<VkSampleCountFlagBits>( samples ),
static_cast<VkImageUsageFlags>( usage ),
static_cast<VkImageTiling>( tiling ),
&propertyCount,
nullptr );
properties.resize( propertyCount );
getDispatcher()->vkGetPhysicalDeviceSparseImageFormatProperties( static_cast<VkPhysicalDevice>( m_physicalDevice ),
static_cast<VkFormat>( format ),
static_cast<VkImageType>( type ),
static_cast<VkSampleCountFlagBits>( samples ),
static_cast<VkImageUsageFlags>( usage ),
static_cast<VkImageTiling>( tiling ),
&propertyCount,
reinterpret_cast<VkSparseImageFormatProperties *>( properties.data() ) );
VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
if ( propertyCount < properties.size() )
{
properties.resize( propertyCount );
}
return properties;
}
VULKAN_HPP_INLINE void Queue::bindSparse( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindSparseInfo> const & bindInfo,
VULKAN_HPP_NAMESPACE::Fence fence ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkQueueBindSparse && "Function <vkQueueBindSparse> requires <VK_VERSION_1_0>" );
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkQueueBindSparse(
static_cast<VkQueue>( m_queue ), bindInfo.size(), reinterpret_cast<const VkBindSparseInfo *>( bindInfo.data() ), static_cast<VkFence>( fence ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::bindSparse" );
}
VULKAN_HPP_NODISCARD
VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Fence>::Type
Device::createFence( VULKAN_HPP_NAMESPACE::FenceCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::Fence fence;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateFence(
static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkFenceCreateInfo *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkFence *>( &fence ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
return VULKAN_HPP_UNEXPECTED( result );
# else
VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createFence" );
# endif
}
return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Fence( *this, *reinterpret_cast<VkFence *>( &fence ), allocator );
}
VULKAN_HPP_INLINE void Device::resetFences( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> const & fences ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkResetFences && "Function <vkResetFences> requires <VK_VERSION_1_0>" );
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkResetFences( static_cast<VkDevice>( m_device ), fences.size(), reinterpret_cast<const VkFence *>( fences.data() ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::resetFences" );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Fence::getStatus() const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetFenceStatus && "Function <vkGetFenceStatus> requires <VK_VERSION_1_0>" );
VULKAN_HPP_NAMESPACE::Result result =
static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetFenceStatus( static_cast<VkDevice>( m_device ), static_cast<VkFence>( m_fence ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck(
result, VULKAN_HPP_NAMESPACE_STRING "::Fence::getStatus", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } );
return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::waitForFences(
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> const & fences, VULKAN_HPP_NAMESPACE::Bool32 waitAll, uint64_t timeout ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkWaitForFences && "Function <vkWaitForFences> requires <VK_VERSION_1_0>" );
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkWaitForFences(
static_cast<VkDevice>( m_device ), fences.size(), reinterpret_cast<const VkFence *>( fences.data() ), static_cast<VkBool32>( waitAll ), timeout ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck(
result, VULKAN_HPP_NAMESPACE_STRING "::Device::waitForFences", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } );
return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
}
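    // Usage sketch (illustrative comment, not generated code): typical fence-based host synchronization. Assumes
    // `device` is a valid vk::raii::Device and `queue`/`submitInfo` are set up as in the submit sketch above.
    //
    //   vk::raii::Fence fence = device.createFence( vk::FenceCreateInfo() );
    //   queue.submit( submitInfo, *fence );
    //   while ( device.waitForFences( *fence, VK_TRUE, 100'000'000 ) == vk::Result::eTimeout )
    //     ;   // keep waiting; a real application might bail out or do other work here
    //   device.resetFences( *fence );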
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Semaphore>::Type
Device::createSemaphore( VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::Semaphore semaphore;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateSemaphore(
static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkSemaphoreCreateInfo *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkSemaphore *>( &semaphore ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
return VULKAN_HPP_UNEXPECTED( result );
# else
VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createSemaphore" );
# endif
}
return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Semaphore( *this, *reinterpret_cast<VkSemaphore *>( &semaphore ), allocator );
}
VULKAN_HPP_NODISCARD
VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Event>::Type
Device::createEvent( VULKAN_HPP_NAMESPACE::EventCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::Event event;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateEvent(
static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkEventCreateInfo *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkEvent *>( &event ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
return VULKAN_HPP_UNEXPECTED( result );
# else
VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createEvent" );
# endif
}
return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Event( *this, *reinterpret_cast<VkEvent *>( &event ), allocator );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Event::getStatus() const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetEventStatus && "Function <vkGetEventStatus> requires <VK_VERSION_1_0>" );
VULKAN_HPP_NAMESPACE::Result result =
static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetEventStatus( static_cast<VkDevice>( m_device ), static_cast<VkEvent>( m_event ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck(
result, VULKAN_HPP_NAMESPACE_STRING "::Event::getStatus", { VULKAN_HPP_NAMESPACE::Result::eEventSet, VULKAN_HPP_NAMESPACE::Result::eEventReset } );
return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
}
VULKAN_HPP_INLINE void Event::set() const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkSetEvent && "Function <vkSetEvent> requires <VK_VERSION_1_0>" );
VULKAN_HPP_NAMESPACE::Result result =
static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkSetEvent( static_cast<VkDevice>( m_device ), static_cast<VkEvent>( m_event ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Event::set" );
}
VULKAN_HPP_INLINE void Event::reset() const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkResetEvent && "Function <vkResetEvent> requires <VK_VERSION_1_0>" );
VULKAN_HPP_NAMESPACE::Result result =
static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkResetEvent( static_cast<VkDevice>( m_device ), static_cast<VkEvent>( m_event ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Event::reset" );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::QueryPool>::Type
Device::createQueryPool( VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::QueryPool queryPool;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateQueryPool(
static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkQueryPoolCreateInfo *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkQueryPool *>( &queryPool ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
return VULKAN_HPP_UNEXPECTED( result );
# else
VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createQueryPool" );
# endif
}
return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::QueryPool( *this, *reinterpret_cast<VkQueryPool *>( &queryPool ), allocator );
}
template <typename DataType>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::pair<VULKAN_HPP_NAMESPACE::Result, std::vector<DataType>> QueryPool::getResults(
uint32_t firstQuery, uint32_t queryCount, size_t dataSize, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetQueryPoolResults && "Function <vkGetQueryPoolResults> requires <VK_VERSION_1_0>" );
VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 );
std::vector<DataType> data( dataSize / sizeof( DataType ) );
VULKAN_HPP_NAMESPACE::Result result =
static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetQueryPoolResults( static_cast<VkDevice>( m_device ),
static_cast<VkQueryPool>( m_queryPool ),
firstQuery,
queryCount,
data.size() * sizeof( DataType ),
reinterpret_cast<void *>( data.data() ),
static_cast<VkDeviceSize>( stride ),
static_cast<VkQueryResultFlags>( flags ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck(
result, VULKAN_HPP_NAMESPACE_STRING "::QueryPool::getResults", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } );
return std::make_pair( result, std::move( data ) );
}
template <typename DataType>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::pair<VULKAN_HPP_NAMESPACE::Result, DataType> QueryPool::getResult(
uint32_t firstQuery, uint32_t queryCount, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetQueryPoolResults && "Function <vkGetQueryPoolResults> requires <VK_VERSION_1_0>" );
DataType data;
VULKAN_HPP_NAMESPACE::Result result =
static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetQueryPoolResults( static_cast<VkDevice>( m_device ),
static_cast<VkQueryPool>( m_queryPool ),
firstQuery,
queryCount,
sizeof( DataType ),
reinterpret_cast<void *>( &data ),
static_cast<VkDeviceSize>( stride ),
static_cast<VkQueryResultFlags>( flags ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck(
result, VULKAN_HPP_NAMESPACE_STRING "::QueryPool::getResult", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } );
return std::make_pair( result, std::move( data ) );
}
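    // Usage sketch (illustrative comment, not generated code): reading two 64-bit timestamps back from a query pool.
    // Assumes `device` is a valid vk::raii::Device and that the queries have been written (e.g. via writeTimestamp)
    // elsewhere.
    //
    //   vk::raii::QueryPool queryPool = device.createQueryPool( vk::QueryPoolCreateInfo( {}, vk::QueryType::eTimestamp, 2 ) );
    //   auto [result, timestamps] = queryPool.getResults<uint64_t>(
    //     0, 2, 2 * sizeof( uint64_t ), sizeof( uint64_t ), vk::QueryResultFlagBits::e64 | vk::QueryResultFlagBits::eWait );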
VULKAN_HPP_NODISCARD
VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Buffer>::Type
Device::createBuffer( VULKAN_HPP_NAMESPACE::BufferCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::Buffer buffer;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateBuffer(
static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkBufferCreateInfo *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkBuffer *>( &buffer ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
return VULKAN_HPP_UNEXPECTED( result );
# else
VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createBuffer" );
# endif
}
return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Buffer( *this, *reinterpret_cast<VkBuffer *>( &buffer ), allocator );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::BufferView>::Type
Device::createBufferView( VULKAN_HPP_NAMESPACE::BufferViewCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::BufferView view;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateBufferView(
static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkBufferViewCreateInfo *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkBufferView *>( &view ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
return VULKAN_HPP_UNEXPECTED( result );
# else
VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createBufferView" );
# endif
}
return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::BufferView( *this, *reinterpret_cast<VkBufferView *>( &view ), allocator );
}
VULKAN_HPP_NODISCARD
VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Image>::Type
Device::createImage( VULKAN_HPP_NAMESPACE::ImageCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::Image image;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateImage(
static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkImageCreateInfo *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkImage *>( &image ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
return VULKAN_HPP_UNEXPECTED( result );
# else
VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createImage" );
# endif
}
return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Image( *this, *reinterpret_cast<VkImage *>( &image ), allocator );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout
Image::getSubresourceLayout( const VULKAN_HPP_NAMESPACE::ImageSubresource & subresource ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageSubresourceLayout && "Function <vkGetImageSubresourceLayout> requires <VK_VERSION_1_0>" );
VULKAN_HPP_NAMESPACE::SubresourceLayout layout;
getDispatcher()->vkGetImageSubresourceLayout( static_cast<VkDevice>( m_device ),
static_cast<VkImage>( m_image ),
reinterpret_cast<const VkImageSubresource *>( &subresource ),
reinterpret_cast<VkSubresourceLayout *>( &layout ) );
return layout;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ImageView>::Type
Device::createImageView( VULKAN_HPP_NAMESPACE::ImageViewCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::ImageView view;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateImageView(
static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkImageViewCreateInfo *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkImageView *>( &view ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
return VULKAN_HPP_UNEXPECTED( result );
# else
VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createImageView" );
# endif
}
return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ImageView( *this, *reinterpret_cast<VkImageView *>( &view ), allocator );
}
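    // Usage sketch (illustrative comment, not generated code): creating a 2D color image view. Assumes `device` is a
    // valid vk::raii::Device, `image` a vk::raii::Image, and `format` the format the image was created with.
    //
    //   vk::ImageViewCreateInfo viewCreateInfo(
    //     {}, *image, vk::ImageViewType::e2D, format, {}, vk::ImageSubresourceRange( vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 ) );
    //   vk::raii::ImageView imageView = device.createImageView( viewCreateInfo );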
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ShaderModule>::Type
Device::createShaderModule( VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::ShaderModule shaderModule;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateShaderModule(
static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkShaderModuleCreateInfo *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkShaderModule *>( &shaderModule ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
return VULKAN_HPP_UNEXPECTED( result );
# else
VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createShaderModule" );
# endif
}
return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ShaderModule( *this, *reinterpret_cast<VkShaderModule *>( &shaderModule ), allocator );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache>::Type
Device::createPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreatePipelineCache(
static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkPipelineCacheCreateInfo *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkPipelineCache *>( &pipelineCache ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
return VULKAN_HPP_UNEXPECTED( result );
# else
VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createPipelineCache" );
# endif
}
return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache( *this, *reinterpret_cast<VkPipelineCache *>( &pipelineCache ), allocator );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<uint8_t> PipelineCache::getData() const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetPipelineCacheData && "Function <vkGetPipelineCacheData> requires <VK_VERSION_1_0>" );
std::vector<uint8_t> data;
size_t dataSize;
VULKAN_HPP_NAMESPACE::Result result;
do
{
result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkGetPipelineCacheData( static_cast<VkDevice>( m_device ), static_cast<VkPipelineCache>( m_pipelineCache ), &dataSize, nullptr ) );
if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && dataSize )
{
data.resize( dataSize );
result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetPipelineCacheData(
static_cast<VkDevice>( m_device ), static_cast<VkPipelineCache>( m_pipelineCache ), &dataSize, reinterpret_cast<void *>( data.data() ) ) );
}
} while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PipelineCache::getData" );
VULKAN_HPP_ASSERT( dataSize <= data.size() );
if ( dataSize < data.size() )
{
data.resize( dataSize );
}
return data;
}
VULKAN_HPP_INLINE void PipelineCache::merge( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::PipelineCache> const & srcCaches ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkMergePipelineCaches && "Function <vkMergePipelineCaches> requires <VK_VERSION_1_0>" );
VULKAN_HPP_NAMESPACE::Result result =
static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkMergePipelineCaches( static_cast<VkDevice>( m_device ),
static_cast<VkPipelineCache>( m_pipelineCache ),
srcCaches.size(),
reinterpret_cast<const VkPipelineCache *>( srcCaches.data() ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PipelineCache::merge" );
}
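    // Usage sketch (illustrative comment, not generated code): retrieving the pipeline-cache blob, e.g. to persist it
    // between runs. Assumes `device` is a valid vk::raii::Device.
    //
    //   vk::raii::PipelineCache pipelineCache = device.createPipelineCache( vk::PipelineCacheCreateInfo() );
    //   std::vector<uint8_t> cacheBlob = pipelineCache.getData();   // write this blob to disk and feed it back through
    //                                                               // PipelineCacheCreateInfo::pInitialData next run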
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<std::vector<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Pipeline>>::Type
Device::createGraphicsPipelines(
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
{
std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() );
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateGraphicsPipelines(
static_cast<VkDevice>( m_device ),
pipelineCache ? static_cast<VkPipelineCache>( **pipelineCache ) : 0,
createInfos.size(),
reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( createInfos.data() ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
if ( ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) && ( result != VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
{
# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
return VULKAN_HPP_UNEXPECTED( result );
# else
VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createGraphicsPipelines" );
# endif
}
std::vector<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Pipeline> pipelinesRAII;
pipelinesRAII.reserve( pipelines.size() );
for ( auto & pipeline : pipelines )
{
pipelinesRAII.emplace_back( *this, *reinterpret_cast<VkPipeline *>( &pipeline ), allocator, result );
}
return pipelinesRAII;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Pipeline>::Type
Device::createGraphicsPipeline(
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::Pipeline pipeline;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateGraphicsPipelines(
static_cast<VkDevice>( m_device ),
pipelineCache ? static_cast<VkPipelineCache>( **pipelineCache ) : 0,
1,
reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkPipeline *>( &pipeline ) ) );
if ( ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) && ( result != VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
{
# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
return VULKAN_HPP_UNEXPECTED( result );
# else
VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createGraphicsPipeline" );
# endif
}
return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Pipeline( *this, *reinterpret_cast<VkPipeline *>( &pipeline ), allocator, result );
}
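    // Usage sketch (illustrative comment, not generated code): creating a single graphics pipeline through a cache.
    // Assumes `device` and `pipelineCache` are valid RAII handles and `graphicsPipelineCreateInfo` is a fully populated
    // vk::GraphicsPipelineCreateInfo built elsewhere.
    //
    //   vk::raii::Pipeline graphicsPipeline = device.createGraphicsPipeline( pipelineCache, graphicsPipelineCreateInfo );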
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<std::vector<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Pipeline>>::Type
Device::createComputePipelines(
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
{
std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() );
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateComputePipelines(
static_cast<VkDevice>( m_device ),
pipelineCache ? static_cast<VkPipelineCache>( **pipelineCache ) : 0,
createInfos.size(),
reinterpret_cast<const VkComputePipelineCreateInfo *>( createInfos.data() ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
if ( ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) && ( result != VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
{
# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
return VULKAN_HPP_UNEXPECTED( result );
# else
VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createComputePipelines" );
# endif
}
std::vector<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Pipeline> pipelinesRAII;
pipelinesRAII.reserve( pipelines.size() );
for ( auto & pipeline : pipelines )
{
pipelinesRAII.emplace_back( *this, *reinterpret_cast<VkPipeline *>( &pipeline ), allocator, result );
}
return pipelinesRAII;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Pipeline>::Type
Device::createComputePipeline( VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::Pipeline pipeline;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateComputePipelines(
static_cast<VkDevice>( m_device ),
pipelineCache ? static_cast<VkPipelineCache>( **pipelineCache ) : 0,
1,
reinterpret_cast<const VkComputePipelineCreateInfo *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkPipeline *>( &pipeline ) ) );
if ( ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) && ( result != VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
{
# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
return VULKAN_HPP_UNEXPECTED( result );
# else
VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createComputePipeline" );
# endif
}
return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Pipeline( *this, *reinterpret_cast<VkPipeline *>( &pipeline ), allocator, result );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineLayout>::Type
Device::createPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreatePipelineLayout(
static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkPipelineLayoutCreateInfo *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkPipelineLayout *>( &pipelineLayout ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
return VULKAN_HPP_UNEXPECTED( result );
# else
VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createPipelineLayout" );
# endif
}
return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineLayout( *this, *reinterpret_cast<VkPipelineLayout *>( &pipelineLayout ), allocator );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Sampler>::Type
Device::createSampler( VULKAN_HPP_NAMESPACE::SamplerCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::Sampler sampler;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateSampler(
static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkSamplerCreateInfo *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkSampler *>( &sampler ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
return VULKAN_HPP_UNEXPECTED( result );
# else
VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createSampler" );
# endif
}
return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Sampler( *this, *reinterpret_cast<VkSampler *>( &sampler ), allocator );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DescriptorSetLayout>::Type
Device::createDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::DescriptorSetLayout setLayout;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateDescriptorSetLayout(
static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkDescriptorSetLayout *>( &setLayout ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
return VULKAN_HPP_UNEXPECTED( result );
# else
VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createDescriptorSetLayout" );
# endif
}
return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DescriptorSetLayout( *this, *reinterpret_cast<VkDescriptorSetLayout *>( &setLayout ), allocator );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DescriptorPool>::Type
Device::createDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT
{
      VULKAN_HPP_ASSERT(
        createInfo.flags & vk::DescriptorPoolCreateFlagBits::eFreeDescriptorSet &&
        "createInfo.flags needs to have vk::DescriptorPoolCreateFlagBits::eFreeDescriptorSet set in order to allow destruction of VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DescriptorSet, which requires returning individual allocations to the pool" );
VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateDescriptorPool(
static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkDescriptorPoolCreateInfo *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkDescriptorPool *>( &descriptorPool ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
return VULKAN_HPP_UNEXPECTED( result );
# else
VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createDescriptorPool" );
# endif
}
return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DescriptorPool( *this, *reinterpret_cast<VkDescriptorPool *>( &descriptorPool ), allocator );
}
VULKAN_HPP_INLINE void DescriptorPool::reset( VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkResetDescriptorPool && "Function <vkResetDescriptorPool> requires <VK_VERSION_1_0>" );
getDispatcher()->vkResetDescriptorPool(
static_cast<VkDevice>( m_device ), static_cast<VkDescriptorPool>( m_descriptorPool ), static_cast<VkDescriptorPoolResetFlags>( flags ) );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<
std::vector<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DescriptorSet>>::Type
Device::allocateDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo const & allocateInfo ) const
{
std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet> descriptorSets( allocateInfo.descriptorSetCount );
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkAllocateDescriptorSets( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkDescriptorSetAllocateInfo *>( &allocateInfo ),
reinterpret_cast<VkDescriptorSet *>( descriptorSets.data() ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
return VULKAN_HPP_UNEXPECTED( result );
# else
VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::allocateDescriptorSets" );
# endif
}
std::vector<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DescriptorSet> descriptorSetsRAII;
descriptorSetsRAII.reserve( descriptorSets.size() );
for ( auto & descriptorSet : descriptorSets )
{
descriptorSetsRAII.emplace_back(
*this, *reinterpret_cast<VkDescriptorSet *>( &descriptorSet ), static_cast<VkDescriptorPool>( allocateInfo.descriptorPool ) );
}
return descriptorSetsRAII;
}
VULKAN_HPP_INLINE void Device::updateDescriptorSets(
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const & descriptorWrites,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CopyDescriptorSet> const & descriptorCopies ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkUpdateDescriptorSets && "Function <vkUpdateDescriptorSets> requires <VK_VERSION_1_0>" );
getDispatcher()->vkUpdateDescriptorSets( static_cast<VkDevice>( m_device ),
descriptorWrites.size(),
reinterpret_cast<const VkWriteDescriptorSet *>( descriptorWrites.data() ),
descriptorCopies.size(),
reinterpret_cast<const VkCopyDescriptorSet *>( descriptorCopies.data() ) );
}
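    // Usage sketch (illustrative comment, not generated code): allocating a descriptor set from a pool created with the
    // eFreeDescriptorSet flag (required by the RAII DescriptorSet wrapper) and pointing it at a uniform buffer.
    // `device`, `descriptorSetLayout`, `uniformBuffer` and `uniformBufferSize` are assumed to exist elsewhere.
    //
    //   vk::DescriptorPoolSize poolSize( vk::DescriptorType::eUniformBuffer, 1 );
    //   vk::raii::DescriptorPool descriptorPool = device.createDescriptorPool(
    //     vk::DescriptorPoolCreateInfo( vk::DescriptorPoolCreateFlagBits::eFreeDescriptorSet, 1, poolSize ) );
    //   vk::DescriptorSetLayout layoutHandle = *descriptorSetLayout;
    //   std::vector<vk::raii::DescriptorSet> descriptorSets =
    //     device.allocateDescriptorSets( vk::DescriptorSetAllocateInfo( *descriptorPool, layoutHandle ) );
    //   vk::DescriptorBufferInfo bufferInfo( *uniformBuffer, 0, uniformBufferSize );
    //   vk::WriteDescriptorSet write( *descriptorSets.front(), 0, 0, vk::DescriptorType::eUniformBuffer, {}, bufferInfo );
    //   device.updateDescriptorSets( write, nullptr );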
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Framebuffer>::Type
Device::createFramebuffer( VULKAN_HPP_NAMESPACE::FramebufferCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::Framebuffer framebuffer;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateFramebuffer(
static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkFramebufferCreateInfo *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkFramebuffer *>( &framebuffer ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
return VULKAN_HPP_UNEXPECTED( result );
# else
VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createFramebuffer" );
# endif
}
return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Framebuffer( *this, *reinterpret_cast<VkFramebuffer *>( &framebuffer ), allocator );
}
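    // Usage sketch (illustrative comment, not generated code): creating a framebuffer with one color attachment.
    // Assumes `device`, `renderPass` and `colorImageView` are valid RAII handles and `width`/`height` are uint32_t
    // values describing the render target extent.
    //
    //   vk::ImageView attachments[] = { *colorImageView };
    //   vk::raii::Framebuffer framebuffer =
    //     device.createFramebuffer( vk::FramebufferCreateInfo( {}, *renderPass, attachments, width, height, 1 ) );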
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::RenderPass>::Type
Device::createRenderPass( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::RenderPass renderPass;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateRenderPass(
static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkRenderPassCreateInfo *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkRenderPass *>( &renderPass ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
return VULKAN_HPP_UNEXPECTED( result );
# else
VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createRenderPass" );
# endif
}
return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::RenderPass( *this, *reinterpret_cast<VkRenderPass *>( &renderPass ), allocator );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Extent2D RenderPass::getRenderAreaGranularity() const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetRenderAreaGranularity && "Function <vkGetRenderAreaGranularity> requires <VK_VERSION_1_0>" );
VULKAN_HPP_NAMESPACE::Extent2D granularity;
getDispatcher()->vkGetRenderAreaGranularity(
static_cast<VkDevice>( m_device ), static_cast<VkRenderPass>( m_renderPass ), reinterpret_cast<VkExtent2D *>( &granularity ) );
return granularity;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CommandPool>::Type
Device::createCommandPool( VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::CommandPool commandPool;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateCommandPool(
static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkCommandPoolCreateInfo *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkCommandPool *>( &commandPool ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
return VULKAN_HPP_UNEXPECTED( result );
# else
VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createCommandPool" );
# endif
}
return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CommandPool( *this, *reinterpret_cast<VkCommandPool *>( &commandPool ), allocator );
}
VULKAN_HPP_INLINE void CommandPool::reset( VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkResetCommandPool && "Function <vkResetCommandPool> requires <VK_VERSION_1_0>" );
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkResetCommandPool(
static_cast<VkDevice>( m_device ), static_cast<VkCommandPool>( m_commandPool ), static_cast<VkCommandPoolResetFlags>( flags ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandPool::reset" );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<
std::vector<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CommandBuffer>>::Type
Device::allocateCommandBuffers( VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo const & allocateInfo ) const
{
std::vector<VULKAN_HPP_NAMESPACE::CommandBuffer> commandBuffers( allocateInfo.commandBufferCount );
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkAllocateCommandBuffers( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkCommandBufferAllocateInfo *>( &allocateInfo ),
reinterpret_cast<VkCommandBuffer *>( commandBuffers.data() ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
return VULKAN_HPP_UNEXPECTED( result );
# else
VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::allocateCommandBuffers" );
# endif
}
std::vector<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CommandBuffer> commandBuffersRAII;
commandBuffersRAII.reserve( commandBuffers.size() );
for ( auto & commandBuffer : commandBuffers )
{
commandBuffersRAII.emplace_back(
*this, *reinterpret_cast<VkCommandBuffer *>( &commandBuffer ), static_cast<VkCommandPool>( allocateInfo.commandPool ) );
}
return commandBuffersRAII;
}
VULKAN_HPP_INLINE void CommandBuffer::begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo & beginInfo ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkBeginCommandBuffer && "Function <vkBeginCommandBuffer> requires <VK_VERSION_1_0>" );
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkBeginCommandBuffer(
static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCommandBufferBeginInfo *>( &beginInfo ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::begin" );
}
VULKAN_HPP_INLINE void CommandBuffer::end() const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkEndCommandBuffer && "Function <vkEndCommandBuffer> requires <VK_VERSION_1_0>" );
VULKAN_HPP_NAMESPACE::Result result =
static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkEndCommandBuffer( static_cast<VkCommandBuffer>( m_commandBuffer ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::end" );
}
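    // Usage sketch (illustrative comment, not generated code): allocating a primary command buffer and recording into
    // it. Assumes `device` is a valid vk::raii::Device and `queueFamilyIndex` was chosen when the device was created.
    //
    //   vk::raii::CommandPool commandPool = device.createCommandPool(
    //     vk::CommandPoolCreateInfo( vk::CommandPoolCreateFlagBits::eResetCommandBuffer, queueFamilyIndex ) );
    //   std::vector<vk::raii::CommandBuffer> commandBuffers = device.allocateCommandBuffers(
    //     vk::CommandBufferAllocateInfo( *commandPool, vk::CommandBufferLevel::ePrimary, 1 ) );
    //   vk::raii::CommandBuffer & commandBuffer = commandBuffers.front();
    //   commandBuffer.begin( vk::CommandBufferBeginInfo( vk::CommandBufferUsageFlagBits::eOneTimeSubmit ) );
    //   // record commands here (see the drawing sketch further below)
    //   commandBuffer.end();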
VULKAN_HPP_INLINE void CommandBuffer::reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkResetCommandBuffer && "Function <vkResetCommandBuffer> requires <VK_VERSION_1_0>" );
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkResetCommandBuffer( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkCommandBufferResetFlags>( flags ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::reset" );
}
VULKAN_HPP_INLINE void CommandBuffer::bindPipeline( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
VULKAN_HPP_NAMESPACE::Pipeline pipeline ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindPipeline && "Function <vkCmdBindPipeline> requires <VK_VERSION_1_0>" );
getDispatcher()->vkCmdBindPipeline(
static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipeline>( pipeline ) );
}
VULKAN_HPP_INLINE void
CommandBuffer::setViewport( uint32_t firstViewport,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetViewport && "Function <vkCmdSetViewport> requires <VK_VERSION_1_0>" );
getDispatcher()->vkCmdSetViewport(
static_cast<VkCommandBuffer>( m_commandBuffer ), firstViewport, viewports.size(), reinterpret_cast<const VkViewport *>( viewports.data() ) );
}
VULKAN_HPP_INLINE void
CommandBuffer::setScissor( uint32_t firstScissor,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetScissor && "Function <vkCmdSetScissor> requires <VK_VERSION_1_0>" );
getDispatcher()->vkCmdSetScissor(
static_cast<VkCommandBuffer>( m_commandBuffer ), firstScissor, scissors.size(), reinterpret_cast<const VkRect2D *>( scissors.data() ) );
}
VULKAN_HPP_INLINE void CommandBuffer::setLineWidth( float lineWidth ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetLineWidth && "Function <vkCmdSetLineWidth> requires <VK_VERSION_1_0>" );
getDispatcher()->vkCmdSetLineWidth( static_cast<VkCommandBuffer>( m_commandBuffer ), lineWidth );
}
VULKAN_HPP_INLINE void
CommandBuffer::setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthBias && "Function <vkCmdSetDepthBias> requires <VK_VERSION_1_0>" );
getDispatcher()->vkCmdSetDepthBias( static_cast<VkCommandBuffer>( m_commandBuffer ), depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor );
}
VULKAN_HPP_INLINE void CommandBuffer::setBlendConstants( const float blendConstants[4] ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetBlendConstants && "Function <vkCmdSetBlendConstants> requires <VK_VERSION_1_0>" );
getDispatcher()->vkCmdSetBlendConstants( static_cast<VkCommandBuffer>( m_commandBuffer ), blendConstants );
}
VULKAN_HPP_INLINE void CommandBuffer::setDepthBounds( float minDepthBounds, float maxDepthBounds ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthBounds && "Function <vkCmdSetDepthBounds> requires <VK_VERSION_1_0>" );
getDispatcher()->vkCmdSetDepthBounds( static_cast<VkCommandBuffer>( m_commandBuffer ), minDepthBounds, maxDepthBounds );
}
VULKAN_HPP_INLINE void CommandBuffer::setStencilCompareMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask,
uint32_t compareMask ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetStencilCompareMask && "Function <vkCmdSetStencilCompareMask> requires <VK_VERSION_1_0>" );
getDispatcher()->vkCmdSetStencilCompareMask( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkStencilFaceFlags>( faceMask ), compareMask );
}
VULKAN_HPP_INLINE void CommandBuffer::setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t writeMask ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetStencilWriteMask && "Function <vkCmdSetStencilWriteMask> requires <VK_VERSION_1_0>" );
getDispatcher()->vkCmdSetStencilWriteMask( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkStencilFaceFlags>( faceMask ), writeMask );
}
VULKAN_HPP_INLINE void CommandBuffer::setStencilReference( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t reference ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetStencilReference && "Function <vkCmdSetStencilReference> requires <VK_VERSION_1_0>" );
getDispatcher()->vkCmdSetStencilReference( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkStencilFaceFlags>( faceMask ), reference );
}
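    // Usage sketch (illustrative comment, not part of the generated API): recording the classic
    // dynamic state on a vk::raii::CommandBuffer `cb` that is already in the recording state,
    // assuming the default `vk` namespace alias.
    //   cb.setViewport( 0, vk::Viewport( 0.0f, 0.0f, 1280.0f, 720.0f, 0.0f, 1.0f ) );
    //   cb.setScissor( 0, vk::Rect2D( { 0, 0 }, { 1280, 720 } ) );
    //   cb.setStencilReference( vk::StencilFaceFlagBits::eFrontAndBack, 1 );
    // Each wrapper above forwards directly to the matching vkCmd* entry point through the dispatcher.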
VULKAN_HPP_INLINE void
CommandBuffer::bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
VULKAN_HPP_NAMESPACE::PipelineLayout layout,
uint32_t firstSet,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets,
VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t> const & dynamicOffsets ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindDescriptorSets && "Function <vkCmdBindDescriptorSets> requires <VK_VERSION_1_0>" );
getDispatcher()->vkCmdBindDescriptorSets( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkPipelineBindPoint>( pipelineBindPoint ),
static_cast<VkPipelineLayout>( layout ),
firstSet,
descriptorSets.size(),
reinterpret_cast<const VkDescriptorSet *>( descriptorSets.data() ),
dynamicOffsets.size(),
dynamicOffsets.data() );
}
VULKAN_HPP_INLINE void CommandBuffer::bindIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer,
VULKAN_HPP_NAMESPACE::DeviceSize offset,
VULKAN_HPP_NAMESPACE::IndexType indexType ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindIndexBuffer && "Function <vkCmdBindIndexBuffer> requires <VK_VERSION_1_0>" );
getDispatcher()->vkCmdBindIndexBuffer( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkBuffer>( buffer ),
static_cast<VkDeviceSize>( offset ),
static_cast<VkIndexType>( indexType ) );
}
VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers( uint32_t firstBinding,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindVertexBuffers && "Function <vkCmdBindVertexBuffers> requires <VK_VERSION_1_0>" );
# ifdef VULKAN_HPP_NO_EXCEPTIONS
VULKAN_HPP_ASSERT( buffers.size() == offsets.size() );
# else
if ( buffers.size() != offsets.size() )
{
throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers: buffers.size() != offsets.size()" );
}
# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
getDispatcher()->vkCmdBindVertexBuffers( static_cast<VkCommandBuffer>( m_commandBuffer ),
firstBinding,
buffers.size(),
reinterpret_cast<const VkBuffer *>( buffers.data() ),
reinterpret_cast<const VkDeviceSize *>( offsets.data() ) );
}
VULKAN_HPP_INLINE void
CommandBuffer::draw( uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDraw && "Function <vkCmdDraw> requires <VK_VERSION_1_0>" );
getDispatcher()->vkCmdDraw( static_cast<VkCommandBuffer>( m_commandBuffer ), vertexCount, instanceCount, firstVertex, firstInstance );
}
VULKAN_HPP_INLINE void CommandBuffer::drawIndexed(
uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawIndexed && "Function <vkCmdDrawIndexed> requires <VK_VERSION_1_0>" );
getDispatcher()->vkCmdDrawIndexed( static_cast<VkCommandBuffer>( m_commandBuffer ), indexCount, instanceCount, firstIndex, vertexOffset, firstInstance );
}
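    // Usage sketch (illustrative comment, not part of the generated API): a typical indexed draw,
    // assuming `cb` is a recording vk::raii::CommandBuffer and `pipelineLayout`, `descriptorSet`,
    // `vertexBuffer`, `indexBuffer` and `indexCount` are plain vk:: handles / values created elsewhere.
    //   cb.bindDescriptorSets( vk::PipelineBindPoint::eGraphics, pipelineLayout, 0, { descriptorSet }, nullptr );
    //   cb.bindVertexBuffers( 0, { vertexBuffer }, { vk::DeviceSize( 0 ) } );
    //   cb.bindIndexBuffer( indexBuffer, 0, vk::IndexType::eUint32 );
    //   cb.drawIndexed( indexCount, 1, 0, 0, 0 );
    // Note that bindVertexBuffers throws a LogicError (or asserts) when buffers.size() != offsets.size().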
VULKAN_HPP_INLINE void CommandBuffer::drawIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer,
VULKAN_HPP_NAMESPACE::DeviceSize offset,
uint32_t drawCount,
uint32_t stride ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawIndirect && "Function <vkCmdDrawIndirect> requires <VK_VERSION_1_0>" );
getDispatcher()->vkCmdDrawIndirect(
static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
}
VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer,
VULKAN_HPP_NAMESPACE::DeviceSize offset,
uint32_t drawCount,
uint32_t stride ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawIndexedIndirect && "Function <vkCmdDrawIndexedIndirect> requires <VK_VERSION_1_0>" );
getDispatcher()->vkCmdDrawIndexedIndirect(
static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
}
VULKAN_HPP_INLINE void CommandBuffer::dispatch( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDispatch && "Function <vkCmdDispatch> requires <VK_VERSION_1_0>" );
getDispatcher()->vkCmdDispatch( static_cast<VkCommandBuffer>( m_commandBuffer ), groupCountX, groupCountY, groupCountZ );
}
VULKAN_HPP_INLINE void CommandBuffer::dispatchIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer,
VULKAN_HPP_NAMESPACE::DeviceSize offset ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDispatchIndirect && "Function <vkCmdDispatchIndirect> requires <VK_VERSION_1_0>" );
getDispatcher()->vkCmdDispatchIndirect(
static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ) );
}
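    // Usage sketch (illustrative comment, not part of the generated API): dispatching a compute
    // workload, assuming `computePipeline` and `elementCount` exist elsewhere and the pipeline was
    // written with a hypothetical local workgroup size of 64.
    //   cb.bindPipeline( vk::PipelineBindPoint::eCompute, computePipeline );
    //   cb.dispatch( ( elementCount + 63 ) / 64, 1, 1 );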
VULKAN_HPP_INLINE void
CommandBuffer::copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer,
VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferCopy> const & regions ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyBuffer && "Function <vkCmdCopyBuffer> requires <VK_VERSION_1_0>" );
getDispatcher()->vkCmdCopyBuffer( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkBuffer>( srcBuffer ),
static_cast<VkBuffer>( dstBuffer ),
regions.size(),
reinterpret_cast<const VkBufferCopy *>( regions.data() ) );
}
VULKAN_HPP_INLINE void
CommandBuffer::copyImage( VULKAN_HPP_NAMESPACE::Image srcImage,
VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
VULKAN_HPP_NAMESPACE::Image dstImage,
VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageCopy> const & regions ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyImage && "Function <vkCmdCopyImage> requires <VK_VERSION_1_0>" );
getDispatcher()->vkCmdCopyImage( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkImage>( srcImage ),
static_cast<VkImageLayout>( srcImageLayout ),
static_cast<VkImage>( dstImage ),
static_cast<VkImageLayout>( dstImageLayout ),
regions.size(),
reinterpret_cast<const VkImageCopy *>( regions.data() ) );
}
VULKAN_HPP_INLINE void CommandBuffer::blitImage( VULKAN_HPP_NAMESPACE::Image srcImage,
VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
VULKAN_HPP_NAMESPACE::Image dstImage,
VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageBlit> const & regions,
VULKAN_HPP_NAMESPACE::Filter filter ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBlitImage && "Function <vkCmdBlitImage> requires <VK_VERSION_1_0>" );
getDispatcher()->vkCmdBlitImage( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkImage>( srcImage ),
static_cast<VkImageLayout>( srcImageLayout ),
static_cast<VkImage>( dstImage ),
static_cast<VkImageLayout>( dstImageLayout ),
regions.size(),
reinterpret_cast<const VkImageBlit *>( regions.data() ),
static_cast<VkFilter>( filter ) );
}
VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage(
VULKAN_HPP_NAMESPACE::Buffer srcBuffer,
VULKAN_HPP_NAMESPACE::Image dstImage,
VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const & regions ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyBufferToImage && "Function <vkCmdCopyBufferToImage> requires <VK_VERSION_1_0>" );
getDispatcher()->vkCmdCopyBufferToImage( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkBuffer>( srcBuffer ),
static_cast<VkImage>( dstImage ),
static_cast<VkImageLayout>( dstImageLayout ),
regions.size(),
reinterpret_cast<const VkBufferImageCopy *>( regions.data() ) );
}
VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer(
VULKAN_HPP_NAMESPACE::Image srcImage,
VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const & regions ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyImageToBuffer && "Function <vkCmdCopyImageToBuffer> requires <VK_VERSION_1_0>" );
getDispatcher()->vkCmdCopyImageToBuffer( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkImage>( srcImage ),
static_cast<VkImageLayout>( srcImageLayout ),
static_cast<VkBuffer>( dstBuffer ),
regions.size(),
reinterpret_cast<const VkBufferImageCopy *>( regions.data() ) );
}
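    // Usage sketch (illustrative comment, not part of the generated API): a staging upload records a
    // buffer-to-image copy once the destination image is in eTransferDstOptimal, assuming
    // `stagingBuffer`, `image` and `extent` were created elsewhere.
    //   vk::BufferImageCopy region( 0, 0, 0, { vk::ImageAspectFlagBits::eColor, 0, 0, 1 }, { 0, 0, 0 }, extent );
    //   cb.copyBufferToImage( stagingBuffer, image, vk::ImageLayout::eTransferDstOptimal, region );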
template <typename DataType>
VULKAN_HPP_INLINE void CommandBuffer::updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,
VULKAN_HPP_NAMESPACE::ArrayProxy<const DataType> const & data ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdUpdateBuffer && "Function <vkCmdUpdateBuffer> requires <VK_VERSION_1_0>" );
getDispatcher()->vkCmdUpdateBuffer( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkBuffer>( dstBuffer ),
static_cast<VkDeviceSize>( dstOffset ),
data.size() * sizeof( DataType ),
reinterpret_cast<const void *>( data.data() ) );
}
VULKAN_HPP_INLINE void CommandBuffer::fillBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,
VULKAN_HPP_NAMESPACE::DeviceSize size,
uint32_t data ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdFillBuffer && "Function <vkCmdFillBuffer> requires <VK_VERSION_1_0>" );
getDispatcher()->vkCmdFillBuffer( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkBuffer>( dstBuffer ),
static_cast<VkDeviceSize>( dstOffset ),
static_cast<VkDeviceSize>( size ),
data );
}
VULKAN_HPP_INLINE void CommandBuffer::clearColorImage(
VULKAN_HPP_NAMESPACE::Image image,
VULKAN_HPP_NAMESPACE::ImageLayout imageLayout,
const VULKAN_HPP_NAMESPACE::ClearColorValue & color,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> const & ranges ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdClearColorImage && "Function <vkCmdClearColorImage> requires <VK_VERSION_1_0>" );
getDispatcher()->vkCmdClearColorImage( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkImage>( image ),
static_cast<VkImageLayout>( imageLayout ),
reinterpret_cast<const VkClearColorValue *>( &color ),
ranges.size(),
reinterpret_cast<const VkImageSubresourceRange *>( ranges.data() ) );
}
VULKAN_HPP_INLINE void CommandBuffer::clearDepthStencilImage(
VULKAN_HPP_NAMESPACE::Image image,
VULKAN_HPP_NAMESPACE::ImageLayout imageLayout,
const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue & depthStencil,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> const & ranges ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdClearDepthStencilImage && "Function <vkCmdClearDepthStencilImage> requires <VK_VERSION_1_0>" );
getDispatcher()->vkCmdClearDepthStencilImage( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkImage>( image ),
static_cast<VkImageLayout>( imageLayout ),
reinterpret_cast<const VkClearDepthStencilValue *>( &depthStencil ),
ranges.size(),
reinterpret_cast<const VkImageSubresourceRange *>( ranges.data() ) );
}
VULKAN_HPP_INLINE void
CommandBuffer::clearAttachments( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearAttachment> const & attachments,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearRect> const & rects ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdClearAttachments && "Function <vkCmdClearAttachments> requires <VK_VERSION_1_0>" );
getDispatcher()->vkCmdClearAttachments( static_cast<VkCommandBuffer>( m_commandBuffer ),
attachments.size(),
reinterpret_cast<const VkClearAttachment *>( attachments.data() ),
rects.size(),
reinterpret_cast<const VkClearRect *>( rects.data() ) );
}
VULKAN_HPP_INLINE void
CommandBuffer::resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage,
VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
VULKAN_HPP_NAMESPACE::Image dstImage,
VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageResolve> const & regions ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdResolveImage && "Function <vkCmdResolveImage> requires <VK_VERSION_1_0>" );
getDispatcher()->vkCmdResolveImage( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkImage>( srcImage ),
static_cast<VkImageLayout>( srcImageLayout ),
static_cast<VkImage>( dstImage ),
static_cast<VkImageLayout>( dstImageLayout ),
regions.size(),
reinterpret_cast<const VkImageResolve *>( regions.data() ) );
}
VULKAN_HPP_INLINE void CommandBuffer::setEvent( VULKAN_HPP_NAMESPACE::Event event,
VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetEvent && "Function <vkCmdSetEvent> requires <VK_VERSION_1_0>" );
getDispatcher()->vkCmdSetEvent(
static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) );
}
VULKAN_HPP_INLINE void CommandBuffer::resetEvent( VULKAN_HPP_NAMESPACE::Event event,
VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdResetEvent && "Function <vkCmdResetEvent> requires <VK_VERSION_1_0>" );
getDispatcher()->vkCmdResetEvent(
static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) );
}
VULKAN_HPP_INLINE void CommandBuffer::waitEvents(
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events,
VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask,
VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> const & memoryBarriers,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> const & bufferMemoryBarriers,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> const & imageMemoryBarriers ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdWaitEvents && "Function <vkCmdWaitEvents> requires <VK_VERSION_1_0>" );
getDispatcher()->vkCmdWaitEvents( static_cast<VkCommandBuffer>( m_commandBuffer ),
events.size(),
reinterpret_cast<const VkEvent *>( events.data() ),
static_cast<VkPipelineStageFlags>( srcStageMask ),
static_cast<VkPipelineStageFlags>( dstStageMask ),
memoryBarriers.size(),
reinterpret_cast<const VkMemoryBarrier *>( memoryBarriers.data() ),
bufferMemoryBarriers.size(),
reinterpret_cast<const VkBufferMemoryBarrier *>( bufferMemoryBarriers.data() ),
imageMemoryBarriers.size(),
reinterpret_cast<const VkImageMemoryBarrier *>( imageMemoryBarriers.data() ) );
}
VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier(
VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask,
VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask,
VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> const & memoryBarriers,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> const & bufferMemoryBarriers,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> const & imageMemoryBarriers ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdPipelineBarrier && "Function <vkCmdPipelineBarrier> requires <VK_VERSION_1_0>" );
getDispatcher()->vkCmdPipelineBarrier( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkPipelineStageFlags>( srcStageMask ),
static_cast<VkPipelineStageFlags>( dstStageMask ),
static_cast<VkDependencyFlags>( dependencyFlags ),
memoryBarriers.size(),
reinterpret_cast<const VkMemoryBarrier *>( memoryBarriers.data() ),
bufferMemoryBarriers.size(),
reinterpret_cast<const VkBufferMemoryBarrier *>( bufferMemoryBarriers.data() ),
imageMemoryBarriers.size(),
reinterpret_cast<const VkImageMemoryBarrier *>( imageMemoryBarriers.data() ) );
}
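    // Usage sketch (illustrative comment, not part of the generated API): an image layout transition
    // expressed through the pipelineBarrier wrapper above, assuming `image` is a plain vk::Image.
    //   vk::ImageMemoryBarrier barrier( {}, vk::AccessFlagBits::eTransferWrite,
    //                                   vk::ImageLayout::eUndefined, vk::ImageLayout::eTransferDstOptimal,
    //                                   VK_QUEUE_FAMILY_IGNORED, VK_QUEUE_FAMILY_IGNORED, image,
    //                                   { vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 } );
    //   cb.pipelineBarrier( vk::PipelineStageFlagBits::eTopOfPipe, vk::PipelineStageFlagBits::eTransfer,
    //                       {}, nullptr, nullptr, barrier );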
VULKAN_HPP_INLINE void CommandBuffer::beginQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
uint32_t query,
VULKAN_HPP_NAMESPACE::QueryControlFlags flags ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBeginQuery && "Function <vkCmdBeginQuery> requires <VK_VERSION_1_0>" );
getDispatcher()->vkCmdBeginQuery(
static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkQueryPool>( queryPool ), query, static_cast<VkQueryControlFlags>( flags ) );
}
VULKAN_HPP_INLINE void CommandBuffer::endQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdEndQuery && "Function <vkCmdEndQuery> requires <VK_VERSION_1_0>" );
getDispatcher()->vkCmdEndQuery( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkQueryPool>( queryPool ), query );
}
VULKAN_HPP_INLINE void
CommandBuffer::resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdResetQueryPool && "Function <vkCmdResetQueryPool> requires <VK_VERSION_1_0>" );
getDispatcher()->vkCmdResetQueryPool( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount );
}
VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage,
VULKAN_HPP_NAMESPACE::QueryPool queryPool,
uint32_t query ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdWriteTimestamp && "Function <vkCmdWriteTimestamp> requires <VK_VERSION_1_0>" );
getDispatcher()->vkCmdWriteTimestamp(
static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPipelineStageFlagBits>( pipelineStage ), static_cast<VkQueryPool>( queryPool ), query );
}
VULKAN_HPP_INLINE void CommandBuffer::copyQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
uint32_t firstQuery,
uint32_t queryCount,
VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,
VULKAN_HPP_NAMESPACE::DeviceSize stride,
VULKAN_HPP_NAMESPACE::QueryResultFlags flags ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyQueryPoolResults && "Function <vkCmdCopyQueryPoolResults> requires <VK_VERSION_1_0>" );
getDispatcher()->vkCmdCopyQueryPoolResults( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkQueryPool>( queryPool ),
firstQuery,
queryCount,
static_cast<VkBuffer>( dstBuffer ),
static_cast<VkDeviceSize>( dstOffset ),
static_cast<VkDeviceSize>( stride ),
static_cast<VkQueryResultFlags>( flags ) );
}
template <typename ValuesType>
VULKAN_HPP_INLINE void CommandBuffer::pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout,
VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags,
uint32_t offset,
VULKAN_HPP_NAMESPACE::ArrayProxy<const ValuesType> const & values ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdPushConstants && "Function <vkCmdPushConstants> requires <VK_VERSION_1_0>" );
getDispatcher()->vkCmdPushConstants( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkPipelineLayout>( layout ),
static_cast<VkShaderStageFlags>( stageFlags ),
offset,
values.size() * sizeof( ValuesType ),
reinterpret_cast<const void *>( values.data() ) );
}
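    // Usage sketch (illustrative comment, not part of the generated API): the templated pushConstants
    // overload derives the byte size from the element type, assuming a hypothetical `PushData` struct
    // that matches the push-constant range of `pipelineLayout`.
    //   PushData pushData = makePushData();  // hypothetical helper
    //   cb.pushConstants<PushData>( pipelineLayout, vk::ShaderStageFlagBits::eVertex, 0, pushData );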
VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin,
VULKAN_HPP_NAMESPACE::SubpassContents contents ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBeginRenderPass && "Function <vkCmdBeginRenderPass> requires <VK_VERSION_1_0>" );
getDispatcher()->vkCmdBeginRenderPass( static_cast<VkCommandBuffer>( m_commandBuffer ),
reinterpret_cast<const VkRenderPassBeginInfo *>( &renderPassBegin ),
static_cast<VkSubpassContents>( contents ) );
}
VULKAN_HPP_INLINE void CommandBuffer::nextSubpass( VULKAN_HPP_NAMESPACE::SubpassContents contents ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdNextSubpass && "Function <vkCmdNextSubpass> requires <VK_VERSION_1_0>" );
getDispatcher()->vkCmdNextSubpass( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkSubpassContents>( contents ) );
}
VULKAN_HPP_INLINE void CommandBuffer::endRenderPass() const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdEndRenderPass && "Function <vkCmdEndRenderPass> requires <VK_VERSION_1_0>" );
getDispatcher()->vkCmdEndRenderPass( static_cast<VkCommandBuffer>( m_commandBuffer ) );
}
VULKAN_HPP_INLINE void CommandBuffer::executeCommands(
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdExecuteCommands && "Function <vkCmdExecuteCommands> requires <VK_VERSION_1_0>" );
getDispatcher()->vkCmdExecuteCommands(
static_cast<VkCommandBuffer>( m_commandBuffer ), commandBuffers.size(), reinterpret_cast<const VkCommandBuffer *>( commandBuffers.data() ) );
}
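    // Usage sketch (illustrative comment, not part of the generated API): a minimal render-pass
    // recording with the calls above, assuming `renderPass`, `framebuffer`, `renderArea` and
    // `clearValues` were set up elsewhere.
    //   cb.beginRenderPass( vk::RenderPassBeginInfo( renderPass, framebuffer, renderArea, clearValues ),
    //                       vk::SubpassContents::eInline );
    //   cb.draw( 3, 1, 0, 0 );
    //   cb.endRenderPass();
    // Pre-recorded secondary command buffers can instead be replayed via executeCommands().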
//=== VK_VERSION_1_1 ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint32_t Context::enumerateInstanceVersion() const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkEnumerateInstanceVersion && "Function <vkEnumerateInstanceVersion> requires <VK_VERSION_1_1>" );
uint32_t apiVersion;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkEnumerateInstanceVersion( &apiVersion ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Context::enumerateInstanceVersion" );
return apiVersion;
}
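    // Usage sketch (illustrative comment, not part of the generated API): checking the loader's
    // instance-level API version before creating an instance.
    //   vk::raii::Context context;
    //   uint32_t apiVersion  = context.enumerateInstanceVersion();
    //   bool     hasVulkan11 = ( apiVersion >= VK_API_VERSION_1_1 );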
VULKAN_HPP_INLINE void
Device::bindBufferMemory2( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> const & bindInfos ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkBindBufferMemory2 && "Function <vkBindBufferMemory2> requires <VK_KHR_bind_memory2> or <VK_VERSION_1_1>" );
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkBindBufferMemory2(
static_cast<VkDevice>( m_device ), bindInfos.size(), reinterpret_cast<const VkBindBufferMemoryInfo *>( bindInfos.data() ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory2" );
}
VULKAN_HPP_INLINE void Device::bindImageMemory2( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> const & bindInfos ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkBindImageMemory2 && "Function <vkBindImageMemory2> requires <VK_KHR_bind_memory2> or <VK_VERSION_1_1>" );
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkBindImageMemory2(
static_cast<VkDevice>( m_device ), bindInfos.size(), reinterpret_cast<const VkBindImageMemoryInfo *>( bindInfos.data() ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory2" );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags
Device::getGroupPeerMemoryFeatures( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceGroupPeerMemoryFeatures &&
"Function <vkGetDeviceGroupPeerMemoryFeatures> requires <VK_KHR_device_group> or <VK_VERSION_1_1>" );
VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags peerMemoryFeatures;
getDispatcher()->vkGetDeviceGroupPeerMemoryFeatures( static_cast<VkDevice>( m_device ),
heapIndex,
localDeviceIndex,
remoteDeviceIndex,
reinterpret_cast<VkPeerMemoryFeatureFlags *>( &peerMemoryFeatures ) );
return peerMemoryFeatures;
}
VULKAN_HPP_INLINE void CommandBuffer::setDeviceMask( uint32_t deviceMask ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDeviceMask && "Function <vkCmdSetDeviceMask> requires <VK_KHR_device_group> or <VK_VERSION_1_1>" );
getDispatcher()->vkCmdSetDeviceMask( static_cast<VkCommandBuffer>( m_commandBuffer ), deviceMask );
}
VULKAN_HPP_INLINE void CommandBuffer::dispatchBase( uint32_t baseGroupX,
uint32_t baseGroupY,
uint32_t baseGroupZ,
uint32_t groupCountX,
uint32_t groupCountY,
uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDispatchBase && "Function <vkCmdDispatchBase> requires <VK_KHR_device_group> or <VK_VERSION_1_1>" );
getDispatcher()->vkCmdDispatchBase(
static_cast<VkCommandBuffer>( m_commandBuffer ), baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties> Instance::enumeratePhysicalDeviceGroups() const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkEnumeratePhysicalDeviceGroups &&
"Function <vkEnumeratePhysicalDeviceGroups> requires <VK_KHR_device_group_creation> or <VK_VERSION_1_1>" );
std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties> physicalDeviceGroupProperties;
uint32_t physicalDeviceGroupCount;
VULKAN_HPP_NAMESPACE::Result result;
do
{
result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkEnumeratePhysicalDeviceGroups( static_cast<VkInstance>( m_instance ), &physicalDeviceGroupCount, nullptr ) );
if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && physicalDeviceGroupCount )
{
physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkEnumeratePhysicalDeviceGroups( static_cast<VkInstance>( m_instance ),
&physicalDeviceGroupCount,
reinterpret_cast<VkPhysicalDeviceGroupProperties *>( physicalDeviceGroupProperties.data() ) ) );
}
} while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroups" );
VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() );
if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() )
{
physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
}
return physicalDeviceGroupProperties;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
Device::getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageMemoryRequirements2 &&
"Function <vkGetImageMemoryRequirements2> requires <VK_KHR_get_memory_requirements2> or <VK_VERSION_1_1>" );
VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
getDispatcher()->vkGetImageMemoryRequirements2( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ),
reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
return memoryRequirements;
}
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
Device::getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageMemoryRequirements2 &&
"Function <vkGetImageMemoryRequirements2> requires <VK_KHR_get_memory_requirements2> or <VK_VERSION_1_1>" );
VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
getDispatcher()->vkGetImageMemoryRequirements2( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ),
reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
return structureChain;
}
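    // Usage sketch (illustrative comment, not part of the generated API): querying dedicated-allocation
    // preferences alongside the basic requirements through the StructureChain overload, assuming
    // `device` is a vk::raii::Device and `image` a vk::raii::Image.
    //   auto chain = device.getImageMemoryRequirements2<vk::MemoryRequirements2, vk::MemoryDedicatedRequirements>(
    //     vk::ImageMemoryRequirementsInfo2( *image ) );
    //   bool preferDedicated = chain.get<vk::MemoryDedicatedRequirements>().prefersDedicatedAllocation;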
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
Device::getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetBufferMemoryRequirements2 &&
"Function <vkGetBufferMemoryRequirements2> requires <VK_KHR_get_memory_requirements2> or <VK_VERSION_1_1>" );
VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
getDispatcher()->vkGetBufferMemoryRequirements2( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ),
reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
return memoryRequirements;
}
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
Device::getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetBufferMemoryRequirements2 &&
"Function <vkGetBufferMemoryRequirements2> requires <VK_KHR_get_memory_requirements2> or <VK_VERSION_1_1>" );
VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
getDispatcher()->vkGetBufferMemoryRequirements2( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ),
reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
return structureChain;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2>
Device::getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageSparseMemoryRequirements2 &&
"Function <vkGetImageSparseMemoryRequirements2> requires <VK_KHR_get_memory_requirements2> or <VK_VERSION_1_1>" );
std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2> sparseMemoryRequirements;
uint32_t sparseMemoryRequirementCount;
getDispatcher()->vkGetImageSparseMemoryRequirements2(
static_cast<VkDevice>( m_device ), reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), &sparseMemoryRequirementCount, nullptr );
sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
getDispatcher()->vkGetImageSparseMemoryRequirements2( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ),
&sparseMemoryRequirementCount,
reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
{
sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
}
return sparseMemoryRequirements;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 PhysicalDevice::getFeatures2() const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceFeatures2 &&
"Function <vkGetPhysicalDeviceFeatures2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 features;
getDispatcher()->vkGetPhysicalDeviceFeatures2( static_cast<VkPhysicalDevice>( m_physicalDevice ),
reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) );
return features;
}
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> PhysicalDevice::getFeatures2() const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceFeatures2 &&
"Function <vkGetPhysicalDeviceFeatures2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 & features = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2>();
getDispatcher()->vkGetPhysicalDeviceFeatures2( static_cast<VkPhysicalDevice>( m_physicalDevice ),
reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) );
return structureChain;
}
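    // Usage sketch (illustrative comment, not part of the generated API): probing Vulkan 1.2 features
    // through the chained query, assuming `physicalDevice` is a vk::raii::PhysicalDevice.
    //   auto features = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2, vk::PhysicalDeviceVulkan12Features>();
    //   bool hasTimelineSemaphore = features.get<vk::PhysicalDeviceVulkan12Features>().timelineSemaphore;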
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 PhysicalDevice::getProperties2() const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceProperties2 &&
"Function <vkGetPhysicalDeviceProperties2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 properties;
getDispatcher()->vkGetPhysicalDeviceProperties2( static_cast<VkPhysicalDevice>( m_physicalDevice ),
reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) );
return properties;
}
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> PhysicalDevice::getProperties2() const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceProperties2 &&
"Function <vkGetPhysicalDeviceProperties2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 & properties = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2>();
getDispatcher()->vkGetPhysicalDeviceProperties2( static_cast<VkPhysicalDevice>( m_physicalDevice ),
reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) );
return structureChain;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties2
PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceFormatProperties2 &&
"Function <vkGetPhysicalDeviceFormatProperties2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
VULKAN_HPP_NAMESPACE::FormatProperties2 formatProperties;
getDispatcher()->vkGetPhysicalDeviceFormatProperties2(
static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) );
return formatProperties;
}
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceFormatProperties2 &&
"Function <vkGetPhysicalDeviceFormatProperties2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
VULKAN_HPP_NAMESPACE::FormatProperties2 & formatProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::FormatProperties2>();
getDispatcher()->vkGetPhysicalDeviceFormatProperties2(
static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) );
return structureChain;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ImageFormatProperties2
PhysicalDevice::getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceImageFormatProperties2 &&
"Function <vkGetPhysicalDeviceImageFormatProperties2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
VULKAN_HPP_NAMESPACE::ImageFormatProperties2 imageFormatProperties;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkGetPhysicalDeviceImageFormatProperties2( static_cast<VkPhysicalDevice>( m_physicalDevice ),
reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ),
reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2" );
return imageFormatProperties;
}
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
PhysicalDevice::getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceImageFormatProperties2 &&
"Function <vkGetPhysicalDeviceImageFormatProperties2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
    VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
VULKAN_HPP_NAMESPACE::ImageFormatProperties2 & imageFormatProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>();
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkGetPhysicalDeviceImageFormatProperties2( static_cast<VkPhysicalDevice>( m_physicalDevice ),
reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ),
reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2" );
return structureChain;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> PhysicalDevice::getQueueFamilyProperties2() const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2 &&
"Function <vkGetPhysicalDeviceQueueFamilyProperties2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties;
uint32_t queueFamilyPropertyCount;
getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2( static_cast<VkPhysicalDevice>( m_physicalDevice ), &queueFamilyPropertyCount, nullptr );
queueFamilyProperties.resize( queueFamilyPropertyCount );
getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2( static_cast<VkPhysicalDevice>( m_physicalDevice ),
&queueFamilyPropertyCount,
reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
{
queueFamilyProperties.resize( queueFamilyPropertyCount );
}
return queueFamilyProperties;
}
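    // Usage sketch (illustrative comment, not part of the generated API): selecting a graphics-capable
    // queue family from the properties returned above.
    //   auto     queueFamilies  = physicalDevice.getQueueFamilyProperties2();
    //   uint32_t graphicsFamily = 0;
    //   for ( uint32_t i = 0; i < queueFamilies.size(); ++i )
    //   {
    //     if ( queueFamilies[i].queueFamilyProperties.queueFlags & vk::QueueFlagBits::eGraphics )
    //     {
    //       graphicsFamily = i;
    //       break;
    //     }
    //   }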
template <typename StructureChain>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<StructureChain> PhysicalDevice::getQueueFamilyProperties2() const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2 &&
"Function <vkGetPhysicalDeviceQueueFamilyProperties2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
std::vector<StructureChain> structureChains;
std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties;
uint32_t queueFamilyPropertyCount;
getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2( static_cast<VkPhysicalDevice>( m_physicalDevice ), &queueFamilyPropertyCount, nullptr );
structureChains.resize( queueFamilyPropertyCount );
queueFamilyProperties.resize( queueFamilyPropertyCount );
for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
{
queueFamilyProperties[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext;
}
getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2( static_cast<VkPhysicalDevice>( m_physicalDevice ),
&queueFamilyPropertyCount,
reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
{
structureChains.resize( queueFamilyPropertyCount );
}
for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
{
structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = queueFamilyProperties[i];
}
return structureChains;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2
PhysicalDevice::getMemoryProperties2() const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceMemoryProperties2 &&
"Function <vkGetPhysicalDeviceMemoryProperties2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 memoryProperties;
getDispatcher()->vkGetPhysicalDeviceMemoryProperties2( static_cast<VkPhysicalDevice>( m_physicalDevice ),
reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) );
return memoryProperties;
}
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> PhysicalDevice::getMemoryProperties2() const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceMemoryProperties2 &&
"Function <vkGetPhysicalDeviceMemoryProperties2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 & memoryProperties =
structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2>();
getDispatcher()->vkGetPhysicalDeviceMemoryProperties2( static_cast<VkPhysicalDevice>( m_physicalDevice ),
reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) );
return structureChain;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2>
PhysicalDevice::getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSparseImageFormatProperties2 &&
"Function <vkGetPhysicalDeviceSparseImageFormatProperties2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2> properties;
uint32_t propertyCount;
getDispatcher()->vkGetPhysicalDeviceSparseImageFormatProperties2( static_cast<VkPhysicalDevice>( m_physicalDevice ),
reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ),
&propertyCount,
nullptr );
properties.resize( propertyCount );
getDispatcher()->vkGetPhysicalDeviceSparseImageFormatProperties2( static_cast<VkPhysicalDevice>( m_physicalDevice ),
reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ),
&propertyCount,
reinterpret_cast<VkSparseImageFormatProperties2 *>( properties.data() ) );
VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
if ( propertyCount < properties.size() )
{
properties.resize( propertyCount );
}
return properties;
}
VULKAN_HPP_INLINE void CommandPool::trim( VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkTrimCommandPool && "Function <vkTrimCommandPool> requires <VK_KHR_maintenance1> or <VK_VERSION_1_1>" );
getDispatcher()->vkTrimCommandPool(
static_cast<VkDevice>( m_device ), static_cast<VkCommandPool>( m_commandPool ), static_cast<VkCommandPoolTrimFlags>( flags ) );
}
VULKAN_HPP_NODISCARD
VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Queue>::Type
Device::getQueue2( VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 const & queueInfo ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::Queue queue;
getDispatcher()->vkGetDeviceQueue2(
static_cast<VkDevice>( m_device ), reinterpret_cast<const VkDeviceQueueInfo2 *>( &queueInfo ), reinterpret_cast<VkQueue *>( &queue ) );
return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Queue( *this, *reinterpret_cast<VkQueue *>( &queue ) );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SamplerYcbcrConversion>::Type
Device::createSamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateSamplerYcbcrConversion(
static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkSamplerYcbcrConversion *>( &ycbcrConversion ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
return VULKAN_HPP_UNEXPECTED( result );
# else
VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createSamplerYcbcrConversion" );
# endif
}
return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SamplerYcbcrConversion(
*this, *reinterpret_cast<VkSamplerYcbcrConversion *>( &ycbcrConversion ), allocator );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DescriptorUpdateTemplate>::Type
Device::createDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateDescriptorUpdateTemplate(
static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkDescriptorUpdateTemplate *>( &descriptorUpdateTemplate ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
return VULKAN_HPP_UNEXPECTED( result );
# else
VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createDescriptorUpdateTemplate" );
# endif
}
return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DescriptorUpdateTemplate(
*this, *reinterpret_cast<VkDescriptorUpdateTemplate *>( &descriptorUpdateTemplate ), allocator );
}
template <typename DataType>
VULKAN_HPP_INLINE void DescriptorSet::updateWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
DataType const & data ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkUpdateDescriptorSetWithTemplate &&
"Function <vkUpdateDescriptorSetWithTemplate> requires <VK_KHR_descriptor_update_template> or <VK_VERSION_1_1>" );
getDispatcher()->vkUpdateDescriptorSetWithTemplate( static_cast<VkDevice>( m_device ),
static_cast<VkDescriptorSet>( m_descriptorSet ),
static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
reinterpret_cast<const void *>( &data ) );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalBufferProperties
PhysicalDevice::getExternalBufferProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo & externalBufferInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceExternalBufferProperties &&
"Function <vkGetPhysicalDeviceExternalBufferProperties> requires <VK_KHR_external_memory_capabilities> or <VK_VERSION_1_1>" );
VULKAN_HPP_NAMESPACE::ExternalBufferProperties externalBufferProperties;
getDispatcher()->vkGetPhysicalDeviceExternalBufferProperties( static_cast<VkPhysicalDevice>( m_physicalDevice ),
reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( &externalBufferInfo ),
reinterpret_cast<VkExternalBufferProperties *>( &externalBufferProperties ) );
return externalBufferProperties;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalFenceProperties
PhysicalDevice::getExternalFenceProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo & externalFenceInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceExternalFenceProperties &&
"Function <vkGetPhysicalDeviceExternalFenceProperties> requires <VK_KHR_external_fence_capabilities> or <VK_VERSION_1_1>" );
VULKAN_HPP_NAMESPACE::ExternalFenceProperties externalFenceProperties;
getDispatcher()->vkGetPhysicalDeviceExternalFenceProperties( static_cast<VkPhysicalDevice>( m_physicalDevice ),
reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( &externalFenceInfo ),
reinterpret_cast<VkExternalFenceProperties *>( &externalFenceProperties ) );
return externalFenceProperties;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties PhysicalDevice::getExternalSemaphoreProperties(
const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceExternalSemaphoreProperties &&
"Function <vkGetPhysicalDeviceExternalSemaphoreProperties> requires <VK_KHR_external_semaphore_capabilities> or <VK_VERSION_1_1>" );
VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties externalSemaphoreProperties;
getDispatcher()->vkGetPhysicalDeviceExternalSemaphoreProperties(
static_cast<VkPhysicalDevice>( m_physicalDevice ),
reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( &externalSemaphoreInfo ),
reinterpret_cast<VkExternalSemaphoreProperties *>( &externalSemaphoreProperties ) );
return externalSemaphoreProperties;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport
Device::getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetDescriptorSetLayoutSupport &&
"Function <vkGetDescriptorSetLayoutSupport> requires <VK_KHR_maintenance3> or <VK_VERSION_1_1>" );
VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport support;
getDispatcher()->vkGetDescriptorSetLayoutSupport( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ),
reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) );
return support;
}
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
Device::getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetDescriptorSetLayoutSupport &&
"Function <vkGetDescriptorSetLayoutSupport> requires <VK_KHR_maintenance3> or <VK_VERSION_1_1>" );
VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport & support = structureChain.template get<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport>();
getDispatcher()->vkGetDescriptorSetLayoutSupport( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ),
reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) );
return structureChain;
}
//=== VK_VERSION_1_2 ===
VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer,
VULKAN_HPP_NAMESPACE::DeviceSize offset,
VULKAN_HPP_NAMESPACE::Buffer countBuffer,
VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
uint32_t maxDrawCount,
uint32_t stride ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawIndirectCount &&
"Function <vkCmdDrawIndirectCount> requires <VK_AMD_draw_indirect_count> or <VK_KHR_draw_indirect_count> or <VK_VERSION_1_2>" );
getDispatcher()->vkCmdDrawIndirectCount( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkBuffer>( buffer ),
static_cast<VkDeviceSize>( offset ),
static_cast<VkBuffer>( countBuffer ),
static_cast<VkDeviceSize>( countBufferOffset ),
maxDrawCount,
stride );
}
VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer,
VULKAN_HPP_NAMESPACE::DeviceSize offset,
VULKAN_HPP_NAMESPACE::Buffer countBuffer,
VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
uint32_t maxDrawCount,
uint32_t stride ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawIndexedIndirectCount &&
"Function <vkCmdDrawIndexedIndirectCount> requires <VK_AMD_draw_indirect_count> or <VK_KHR_draw_indirect_count> or <VK_VERSION_1_2>" );
getDispatcher()->vkCmdDrawIndexedIndirectCount( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkBuffer>( buffer ),
static_cast<VkDeviceSize>( offset ),
static_cast<VkBuffer>( countBuffer ),
static_cast<VkDeviceSize>( countBufferOffset ),
maxDrawCount,
stride );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::RenderPass>::Type
Device::createRenderPass2( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::RenderPass renderPass;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateRenderPass2(
static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkRenderPassCreateInfo2 *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkRenderPass *>( &renderPass ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
return VULKAN_HPP_UNEXPECTED( result );
# else
VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createRenderPass2" );
# endif
}
return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::RenderPass( *this, *reinterpret_cast<VkRenderPass *>( &renderPass ), allocator );
}
VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin,
const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBeginRenderPass2 &&
"Function <vkCmdBeginRenderPass2> requires <VK_KHR_create_renderpass2> or <VK_VERSION_1_2>" );
getDispatcher()->vkCmdBeginRenderPass2( static_cast<VkCommandBuffer>( m_commandBuffer ),
reinterpret_cast<const VkRenderPassBeginInfo *>( &renderPassBegin ),
reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ) );
}
VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo,
const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdNextSubpass2 && "Function <vkCmdNextSubpass2> requires <VK_KHR_create_renderpass2> or <VK_VERSION_1_2>" );
getDispatcher()->vkCmdNextSubpass2( static_cast<VkCommandBuffer>( m_commandBuffer ),
reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ),
reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) );
}
VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2( const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdEndRenderPass2 && "Function <vkCmdEndRenderPass2> requires <VK_KHR_create_renderpass2> or <VK_VERSION_1_2>" );
getDispatcher()->vkCmdEndRenderPass2( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) );
}
VULKAN_HPP_INLINE void QueryPool::reset( uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkResetQueryPool && "Function <vkResetQueryPool> requires <VK_EXT_host_query_reset> or <VK_VERSION_1_2>" );
getDispatcher()->vkResetQueryPool( static_cast<VkDevice>( m_device ), static_cast<VkQueryPool>( m_queryPool ), firstQuery, queryCount );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t Semaphore::getCounterValue() const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetSemaphoreCounterValue &&
"Function <vkGetSemaphoreCounterValue> requires <VK_KHR_timeline_semaphore> or <VK_VERSION_1_2>" );
uint64_t value;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkGetSemaphoreCounterValue( static_cast<VkDevice>( m_device ), static_cast<VkSemaphore>( m_semaphore ), &value ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Semaphore::getCounterValue" );
return value;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & waitInfo,
uint64_t timeout ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkWaitSemaphores && "Function <vkWaitSemaphores> requires <VK_KHR_timeline_semaphore> or <VK_VERSION_1_2>" );
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkWaitSemaphores( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkSemaphoreWaitInfo *>( &waitInfo ), timeout ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck(
result, VULKAN_HPP_NAMESPACE_STRING "::Device::waitSemaphores", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } );
return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
}
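// Usage sketch (comment only; `device` and a timeline `semaphore` are assumed vk::raii handles, and
// `waitInfo` is assumed to be a vk::SemaphoreWaitInfo referencing that semaphore and a wait value):
//   uint64_t counter = semaphore.getCounterValue();
//   vk::Result r = device.waitSemaphores( waitInfo, UINT64_MAX );  // returns eSuccess or eTimeout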
VULKAN_HPP_INLINE void Device::signalSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo & signalInfo ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkSignalSemaphore && "Function <vkSignalSemaphore> requires <VK_KHR_timeline_semaphore> or <VK_VERSION_1_2>" );
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkSignalSemaphore( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkSemaphoreSignalInfo *>( &signalInfo ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::signalSemaphore" );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress
Device::getBufferAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetBufferDeviceAddress &&
"Function <vkGetBufferDeviceAddress> requires <VK_EXT_buffer_device_address> or <VK_KHR_buffer_device_address> or <VK_VERSION_1_2>" );
VkDeviceAddress result =
getDispatcher()->vkGetBufferDeviceAddress( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
return static_cast<VULKAN_HPP_NAMESPACE::DeviceAddress>( result );
}
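// Usage sketch (comment only; assumes a vk::raii::Buffer `buffer` that was created with
// vk::BufferUsageFlagBits::eShaderDeviceAddress):
//   vk::DeviceAddress address = device.getBufferAddress( vk::BufferDeviceAddressInfo{ *buffer } );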
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t
Device::getBufferOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetBufferOpaqueCaptureAddress &&
"Function <vkGetBufferOpaqueCaptureAddress> requires <VK_KHR_buffer_device_address> or <VK_VERSION_1_2>" );
uint64_t result =
getDispatcher()->vkGetBufferOpaqueCaptureAddress( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
return result;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t
Device::getMemoryOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo & info ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceMemoryOpaqueCaptureAddress &&
"Function <vkGetDeviceMemoryOpaqueCaptureAddress> requires <VK_KHR_buffer_device_address> or <VK_VERSION_1_2>" );
uint64_t result = getDispatcher()->vkGetDeviceMemoryOpaqueCaptureAddress( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( &info ) );
return result;
}
//=== VK_VERSION_1_3 ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties> PhysicalDevice::getToolProperties() const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceToolProperties &&
"Function <vkGetPhysicalDeviceToolProperties> requires <VK_EXT_tooling_info> or <VK_VERSION_1_3>" );
std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties> toolProperties;
uint32_t toolCount;
VULKAN_HPP_NAMESPACE::Result result;
do
{
result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkGetPhysicalDeviceToolProperties( static_cast<VkPhysicalDevice>( m_physicalDevice ), &toolCount, nullptr ) );
if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && toolCount )
{
toolProperties.resize( toolCount );
result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetPhysicalDeviceToolProperties(
static_cast<VkPhysicalDevice>( m_physicalDevice ), &toolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( toolProperties.data() ) ) );
}
} while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolProperties" );
VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() );
if ( toolCount < toolProperties.size() )
{
toolProperties.resize( toolCount );
}
return toolProperties;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PrivateDataSlot>::Type
Device::createPrivateDataSlot( VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreatePrivateDataSlot(
static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkPrivateDataSlot *>( &privateDataSlot ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
return VULKAN_HPP_UNEXPECTED( result );
# else
VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createPrivateDataSlot" );
# endif
}
return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PrivateDataSlot( *this, *reinterpret_cast<VkPrivateDataSlot *>( &privateDataSlot ), allocator );
}
VULKAN_HPP_INLINE void Device::setPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType_,
uint64_t objectHandle,
VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
uint64_t data ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkSetPrivateData && "Function <vkSetPrivateData> requires <VK_EXT_private_data> or <VK_VERSION_1_3>" );
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkSetPrivateData(
static_cast<VkDevice>( m_device ), static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), data ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setPrivateData" );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t Device::getPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType_,
uint64_t objectHandle,
VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetPrivateData && "Function <vkGetPrivateData> requires <VK_EXT_private_data> or <VK_VERSION_1_3>" );
uint64_t data;
getDispatcher()->vkGetPrivateData(
static_cast<VkDevice>( m_device ), static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), &data );
return data;
}
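// Usage sketch (comment only; `objectHandle` is assumed to be the raw uint64_t handle of a live object
// such as a VkImage):
//   vk::raii::PrivateDataSlot slot = device.createPrivateDataSlot( vk::PrivateDataSlotCreateInfo{} );
//   device.setPrivateData( vk::ObjectType::eImage, objectHandle, *slot, 42 );
//   uint64_t stored = device.getPrivateData( vk::ObjectType::eImage, objectHandle, *slot );  // 42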
VULKAN_HPP_INLINE void CommandBuffer::setEvent2( VULKAN_HPP_NAMESPACE::Event event,
const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetEvent2 && "Function <vkCmdSetEvent2> requires <VK_KHR_synchronization2> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetEvent2(
static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkEvent>( event ), reinterpret_cast<const VkDependencyInfo *>( &dependencyInfo ) );
}
VULKAN_HPP_INLINE void CommandBuffer::resetEvent2( VULKAN_HPP_NAMESPACE::Event event,
VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdResetEvent2 && "Function <vkCmdResetEvent2> requires <VK_KHR_synchronization2> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdResetEvent2(
static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags2>( stageMask ) );
}
VULKAN_HPP_INLINE void
CommandBuffer::waitEvents2( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DependencyInfo> const & dependencyInfos ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdWaitEvents2 && "Function <vkCmdWaitEvents2> requires <VK_KHR_synchronization2> or <VK_VERSION_1_3>" );
# ifdef VULKAN_HPP_NO_EXCEPTIONS
VULKAN_HPP_ASSERT( events.size() == dependencyInfos.size() );
# else
if ( events.size() != dependencyInfos.size() )
{
throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::waitEvents2: events.size() != dependencyInfos.size()" );
}
# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
getDispatcher()->vkCmdWaitEvents2( static_cast<VkCommandBuffer>( m_commandBuffer ),
events.size(),
reinterpret_cast<const VkEvent *>( events.data() ),
reinterpret_cast<const VkDependencyInfo *>( dependencyInfos.data() ) );
}
VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier2( const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdPipelineBarrier2 && "Function <vkCmdPipelineBarrier2> requires <VK_KHR_synchronization2> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdPipelineBarrier2( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkDependencyInfo *>( &dependencyInfo ) );
}
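// Usage sketch (comment only; a global memory barrier between a transfer write and a fragment-shader read,
// recorded on an assumed vk::raii::CommandBuffer `commandBuffer`):
//   vk::MemoryBarrier2 barrier{ vk::PipelineStageFlagBits2::eTransfer, vk::AccessFlagBits2::eTransferWrite,
//                               vk::PipelineStageFlagBits2::eFragmentShader, vk::AccessFlagBits2::eShaderRead };
//   commandBuffer.pipelineBarrier2( vk::DependencyInfo{}.setMemoryBarriers( barrier ) );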
VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp2( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage,
VULKAN_HPP_NAMESPACE::QueryPool queryPool,
uint32_t query ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdWriteTimestamp2 && "Function <vkCmdWriteTimestamp2> requires <VK_KHR_synchronization2> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdWriteTimestamp2(
static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPipelineStageFlags2>( stage ), static_cast<VkQueryPool>( queryPool ), query );
}
VULKAN_HPP_INLINE void Queue::submit2( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo2> const & submits,
VULKAN_HPP_NAMESPACE::Fence fence ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkQueueSubmit2 && "Function <vkQueueSubmit2> requires <VK_KHR_synchronization2> or <VK_VERSION_1_3>" );
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkQueueSubmit2(
static_cast<VkQueue>( m_queue ), submits.size(), reinterpret_cast<const VkSubmitInfo2 *>( submits.data() ), static_cast<VkFence>( fence ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::submit2" );
}
VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 & copyBufferInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyBuffer2 && "Function <vkCmdCopyBuffer2> requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdCopyBuffer2( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCopyBufferInfo2 *>( &copyBufferInfo ) );
}
VULKAN_HPP_INLINE void CommandBuffer::copyImage2( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 & copyImageInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyImage2 && "Function <vkCmdCopyImage2> requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdCopyImage2( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCopyImageInfo2 *>( &copyImageInfo ) );
}
VULKAN_HPP_INLINE void
CommandBuffer::copyBufferToImage2( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 & copyBufferToImageInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyBufferToImage2 &&
"Function <vkCmdCopyBufferToImage2> requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdCopyBufferToImage2( static_cast<VkCommandBuffer>( m_commandBuffer ),
reinterpret_cast<const VkCopyBufferToImageInfo2 *>( &copyBufferToImageInfo ) );
}
VULKAN_HPP_INLINE void
CommandBuffer::copyImageToBuffer2( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 & copyImageToBufferInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyImageToBuffer2 &&
"Function <vkCmdCopyImageToBuffer2> requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdCopyImageToBuffer2( static_cast<VkCommandBuffer>( m_commandBuffer ),
reinterpret_cast<const VkCopyImageToBufferInfo2 *>( &copyImageToBufferInfo ) );
}
VULKAN_HPP_INLINE void CommandBuffer::blitImage2( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 & blitImageInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBlitImage2 && "Function <vkCmdBlitImage2> requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdBlitImage2( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkBlitImageInfo2 *>( &blitImageInfo ) );
}
VULKAN_HPP_INLINE void CommandBuffer::resolveImage2( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 & resolveImageInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdResolveImage2 && "Function <vkCmdResolveImage2> requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdResolveImage2( static_cast<VkCommandBuffer>( m_commandBuffer ),
reinterpret_cast<const VkResolveImageInfo2 *>( &resolveImageInfo ) );
}
VULKAN_HPP_INLINE void CommandBuffer::beginRendering( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBeginRendering && "Function <vkCmdBeginRendering> requires <VK_KHR_dynamic_rendering> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdBeginRendering( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkRenderingInfo *>( &renderingInfo ) );
}
VULKAN_HPP_INLINE void CommandBuffer::endRendering() const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdEndRendering && "Function <vkCmdEndRendering> requires <VK_KHR_dynamic_rendering> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdEndRendering( static_cast<VkCommandBuffer>( m_commandBuffer ) );
}
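// Usage sketch (comment only; dynamic rendering into an assumed color image view `imageView` with an
// assumed vk::Extent2D `extent`, recorded on an assumed vk::raii::CommandBuffer `commandBuffer`):
//   vk::RenderingAttachmentInfo color{ imageView, vk::ImageLayout::eColorAttachmentOptimal };
//   commandBuffer.beginRendering(
//     vk::RenderingInfo{}.setRenderArea( { { 0, 0 }, extent } ).setLayerCount( 1 ).setColorAttachments( color ) );
//   // ... draw calls ...
//   commandBuffer.endRendering();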
VULKAN_HPP_INLINE void CommandBuffer::setCullMode( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCullMode &&
"Function <vkCmdSetCullMode> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetCullMode( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkCullModeFlags>( cullMode ) );
}
VULKAN_HPP_INLINE void CommandBuffer::setFrontFace( VULKAN_HPP_NAMESPACE::FrontFace frontFace ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetFrontFace &&
"Function <vkCmdSetFrontFace> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetFrontFace( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkFrontFace>( frontFace ) );
}
VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveTopology( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetPrimitiveTopology &&
"Function <vkCmdSetPrimitiveTopology> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetPrimitiveTopology( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPrimitiveTopology>( primitiveTopology ) );
}
VULKAN_HPP_INLINE void
CommandBuffer::setViewportWithCount( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetViewportWithCount &&
"Function <vkCmdSetViewportWithCount> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetViewportWithCount(
static_cast<VkCommandBuffer>( m_commandBuffer ), viewports.size(), reinterpret_cast<const VkViewport *>( viewports.data() ) );
}
VULKAN_HPP_INLINE void
CommandBuffer::setScissorWithCount( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetScissorWithCount &&
"Function <vkCmdSetScissorWithCount> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetScissorWithCount(
static_cast<VkCommandBuffer>( m_commandBuffer ), scissors.size(), reinterpret_cast<const VkRect2D *>( scissors.data() ) );
}
VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers2( uint32_t firstBinding,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & strides ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindVertexBuffers2 &&
"Function <vkCmdBindVertexBuffers2> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
# ifdef VULKAN_HPP_NO_EXCEPTIONS
VULKAN_HPP_ASSERT( buffers.size() == offsets.size() );
VULKAN_HPP_ASSERT( sizes.empty() || buffers.size() == sizes.size() );
VULKAN_HPP_ASSERT( strides.empty() || buffers.size() == strides.size() );
# else
if ( buffers.size() != offsets.size() )
{
throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2: buffers.size() != offsets.size()" );
}
if ( !sizes.empty() && buffers.size() != sizes.size() )
{
throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2: buffers.size() != sizes.size()" );
}
if ( !strides.empty() && buffers.size() != strides.size() )
{
throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2: buffers.size() != strides.size()" );
}
# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
getDispatcher()->vkCmdBindVertexBuffers2( static_cast<VkCommandBuffer>( m_commandBuffer ),
firstBinding,
buffers.size(),
reinterpret_cast<const VkBuffer *>( buffers.data() ),
reinterpret_cast<const VkDeviceSize *>( offsets.data() ),
reinterpret_cast<const VkDeviceSize *>( sizes.data() ),
reinterpret_cast<const VkDeviceSize *>( strides.data() ) );
}
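// Usage sketch (comment only; binds two assumed vertex buffers. Empty `sizes` / `strides` proxies pass null
// pointers, so the whole buffer range is used and strides come from the bound pipeline or shader object):
//   std::array<vk::Buffer, 2> buffers = { *positionBuffer, *normalBuffer };
//   std::array<vk::DeviceSize, 2> offsets = { 0, 0 };
//   commandBuffer.bindVertexBuffers2( 0, buffers, offsets, {}, {} );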
VULKAN_HPP_INLINE void CommandBuffer::setDepthTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthTestEnable &&
"Function <vkCmdSetDepthTestEnable> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetDepthTestEnable( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthTestEnable ) );
}
VULKAN_HPP_INLINE void CommandBuffer::setDepthWriteEnable( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthWriteEnable &&
"Function <vkCmdSetDepthWriteEnable> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetDepthWriteEnable( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthWriteEnable ) );
}
VULKAN_HPP_INLINE void CommandBuffer::setDepthCompareOp( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthCompareOp &&
"Function <vkCmdSetDepthCompareOp> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetDepthCompareOp( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkCompareOp>( depthCompareOp ) );
}
VULKAN_HPP_INLINE void CommandBuffer::setDepthBoundsTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthBoundsTestEnable &&
"Function <vkCmdSetDepthBoundsTestEnable> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetDepthBoundsTestEnable( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthBoundsTestEnable ) );
}
VULKAN_HPP_INLINE void CommandBuffer::setStencilTestEnable( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetStencilTestEnable &&
"Function <vkCmdSetStencilTestEnable> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetStencilTestEnable( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( stencilTestEnable ) );
}
VULKAN_HPP_INLINE void CommandBuffer::setStencilOp( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask,
VULKAN_HPP_NAMESPACE::StencilOp failOp,
VULKAN_HPP_NAMESPACE::StencilOp passOp,
VULKAN_HPP_NAMESPACE::StencilOp depthFailOp,
VULKAN_HPP_NAMESPACE::CompareOp compareOp ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetStencilOp &&
"Function <vkCmdSetStencilOp> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetStencilOp( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkStencilFaceFlags>( faceMask ),
static_cast<VkStencilOp>( failOp ),
static_cast<VkStencilOp>( passOp ),
static_cast<VkStencilOp>( depthFailOp ),
static_cast<VkCompareOp>( compareOp ) );
}
VULKAN_HPP_INLINE void CommandBuffer::setRasterizerDiscardEnable( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetRasterizerDiscardEnable &&
"Function <vkCmdSetRasterizerDiscardEnable> requires <VK_EXT_extended_dynamic_state2> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetRasterizerDiscardEnable( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( rasterizerDiscardEnable ) );
}
VULKAN_HPP_INLINE void CommandBuffer::setDepthBiasEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthBiasEnable &&
"Function <vkCmdSetDepthBiasEnable> requires <VK_EXT_extended_dynamic_state2> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetDepthBiasEnable( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthBiasEnable ) );
}
VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveRestartEnable( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetPrimitiveRestartEnable &&
"Function <vkCmdSetPrimitiveRestartEnable> requires <VK_EXT_extended_dynamic_state2> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetPrimitiveRestartEnable( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( primitiveRestartEnable ) );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
Device::getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceBufferMemoryRequirements &&
"Function <vkGetDeviceBufferMemoryRequirements> requires <VK_KHR_maintenance4> or <VK_VERSION_1_3>" );
VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
getDispatcher()->vkGetDeviceBufferMemoryRequirements( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( &info ),
reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
return memoryRequirements;
}
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
Device::getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceBufferMemoryRequirements &&
"Function <vkGetDeviceBufferMemoryRequirements> requires <VK_KHR_maintenance4> or <VK_VERSION_1_3>" );
VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
getDispatcher()->vkGetDeviceBufferMemoryRequirements( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( &info ),
reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
return structureChain;
}
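// Usage sketch (comment only) for the StructureChain overload above, querying dedicated-allocation info
// alongside the base requirements for an assumed vk::DeviceBufferMemoryRequirements `info`:
//   auto chain = device.getBufferMemoryRequirements<vk::MemoryRequirements2, vk::MemoryDedicatedRequirements>( info );
//   bool dedicated = chain.get<vk::MemoryDedicatedRequirements>().prefersDedicatedAllocation;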
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
Device::getImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceImageMemoryRequirements &&
"Function <vkGetDeviceImageMemoryRequirements> requires <VK_KHR_maintenance4> or <VK_VERSION_1_3>" );
VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
getDispatcher()->vkGetDeviceImageMemoryRequirements( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ),
reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
return memoryRequirements;
}
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
Device::getImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceImageMemoryRequirements &&
"Function <vkGetDeviceImageMemoryRequirements> requires <VK_KHR_maintenance4> or <VK_VERSION_1_3>" );
VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
getDispatcher()->vkGetDeviceImageMemoryRequirements( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ),
reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
return structureChain;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2>
Device::getImageSparseMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceImageSparseMemoryRequirements &&
"Function <vkGetDeviceImageSparseMemoryRequirements> requires <VK_KHR_maintenance4> or <VK_VERSION_1_3>" );
std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2> sparseMemoryRequirements;
uint32_t sparseMemoryRequirementCount;
getDispatcher()->vkGetDeviceImageSparseMemoryRequirements(
static_cast<VkDevice>( m_device ), reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), &sparseMemoryRequirementCount, nullptr );
sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
getDispatcher()->vkGetDeviceImageSparseMemoryRequirements( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ),
&sparseMemoryRequirementCount,
reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
{
sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
}
return sparseMemoryRequirements;
}
//=== VK_KHR_surface ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32 PhysicalDevice::getSurfaceSupportKHR( uint32_t queueFamilyIndex,
VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSurfaceSupportKHR && "Function <vkGetPhysicalDeviceSurfaceSupportKHR> requires <VK_KHR_surface>" );
VULKAN_HPP_NAMESPACE::Bool32 supported;
VULKAN_HPP_NAMESPACE::Result result =
static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetPhysicalDeviceSurfaceSupportKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
queueFamilyIndex,
static_cast<VkSurfaceKHR>( surface ),
reinterpret_cast<VkBool32 *>( &supported ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceSupportKHR" );
return supported;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR
PhysicalDevice::getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSurfaceCapabilitiesKHR &&
"Function <vkGetPhysicalDeviceSurfaceCapabilitiesKHR> requires <VK_KHR_surface>" );
VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR surfaceCapabilities;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkGetPhysicalDeviceSurfaceCapabilitiesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
static_cast<VkSurfaceKHR>( surface ),
reinterpret_cast<VkSurfaceCapabilitiesKHR *>( &surfaceCapabilities ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilitiesKHR" );
return surfaceCapabilities;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR>
PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSurfaceFormatsKHR && "Function <vkGetPhysicalDeviceSurfaceFormatsKHR> requires <VK_KHR_surface>" );
std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR> surfaceFormats;
uint32_t surfaceFormatCount;
VULKAN_HPP_NAMESPACE::Result result;
do
{
result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetPhysicalDeviceSurfaceFormatsKHR(
static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, nullptr ) );
if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && surfaceFormatCount )
{
surfaceFormats.resize( surfaceFormatCount );
result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkGetPhysicalDeviceSurfaceFormatsKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
static_cast<VkSurfaceKHR>( surface ),
&surfaceFormatCount,
reinterpret_cast<VkSurfaceFormatKHR *>( surfaceFormats.data() ) ) );
}
} while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormatsKHR" );
VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
if ( surfaceFormatCount < surfaceFormats.size() )
{
surfaceFormats.resize( surfaceFormatCount );
}
return surfaceFormats;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR>
PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSurfacePresentModesKHR &&
"Function <vkGetPhysicalDeviceSurfacePresentModesKHR> requires <VK_KHR_surface>" );
std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR> presentModes;
uint32_t presentModeCount;
VULKAN_HPP_NAMESPACE::Result result;
do
{
result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetPhysicalDeviceSurfacePresentModesKHR(
static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkSurfaceKHR>( surface ), &presentModeCount, nullptr ) );
if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && presentModeCount )
{
presentModes.resize( presentModeCount );
result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkGetPhysicalDeviceSurfacePresentModesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
static_cast<VkSurfaceKHR>( surface ),
&presentModeCount,
reinterpret_cast<VkPresentModeKHR *>( presentModes.data() ) ) );
}
} while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModesKHR" );
VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() );
if ( presentModeCount < presentModes.size() )
{
presentModes.resize( presentModeCount );
}
return presentModes;
}
//=== VK_KHR_swapchain ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SwapchainKHR>::Type
Device::createSwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateSwapchainKHR(
static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkSwapchainCreateInfoKHR *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkSwapchainKHR *>( &swapchain ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
return VULKAN_HPP_UNEXPECTED( result );
# else
VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createSwapchainKHR" );
# endif
}
return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SwapchainKHR( *this, *reinterpret_cast<VkSwapchainKHR *>( &swapchain ), allocator );
}
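// Usage sketch (comment only; assumes a filled vk::SwapchainCreateInfoKHR `swapchainCreateInfo`):
//   vk::raii::SwapchainKHR swapchain = device.createSwapchainKHR( swapchainCreateInfo );
//   std::vector<vk::Image> swapchainImages = swapchain.getImages();  // non-owning handles, see getImages() below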
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::Image> SwapchainKHR::getImages() const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetSwapchainImagesKHR && "Function <vkGetSwapchainImagesKHR> requires <VK_KHR_swapchain>" );
std::vector<VULKAN_HPP_NAMESPACE::Image> swapchainImages;
uint32_t swapchainImageCount;
VULKAN_HPP_NAMESPACE::Result result;
do
{
result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetSwapchainImagesKHR(
static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( m_swapchain ), &swapchainImageCount, nullptr ) );
if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && swapchainImageCount )
{
swapchainImages.resize( swapchainImageCount );
result =
static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetSwapchainImagesKHR( static_cast<VkDevice>( m_device ),
static_cast<VkSwapchainKHR>( m_swapchain ),
&swapchainImageCount,
reinterpret_cast<VkImage *>( swapchainImages.data() ) ) );
}
} while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::getImages" );
VULKAN_HPP_ASSERT( swapchainImageCount <= swapchainImages.size() );
if ( swapchainImageCount < swapchainImages.size() )
{
swapchainImages.resize( swapchainImageCount );
}
return swapchainImages;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::pair<VULKAN_HPP_NAMESPACE::Result, uint32_t>
SwapchainKHR::acquireNextImage( uint64_t timeout, VULKAN_HPP_NAMESPACE::Semaphore semaphore, VULKAN_HPP_NAMESPACE::Fence fence ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkAcquireNextImageKHR && "Function <vkAcquireNextImageKHR> requires <VK_KHR_swapchain>" );
uint32_t imageIndex;
VULKAN_HPP_NAMESPACE::Result result =
static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkAcquireNextImageKHR( static_cast<VkDevice>( m_device ),
static_cast<VkSwapchainKHR>( m_swapchain ),
timeout,
static_cast<VkSemaphore>( semaphore ),
static_cast<VkFence>( fence ),
&imageIndex ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result,
VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::acquireNextImage",
{ VULKAN_HPP_NAMESPACE::Result::eSuccess,
VULKAN_HPP_NAMESPACE::Result::eTimeout,
VULKAN_HPP_NAMESPACE::Result::eNotReady,
VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } );
return std::make_pair( result, std::move( imageIndex ) );
}
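// Usage sketch (comment only; `imageAvailable` is an assumed vk::raii::Semaphore):
//   auto [acquireResult, imageIndex] = swapchain.acquireNextImage( UINT64_MAX, *imageAvailable );
//   if ( ( acquireResult == vk::Result::eSuccess ) || ( acquireResult == vk::Result::eSuboptimalKHR ) )
//   {
//     // record and present using swapchainImages[imageIndex]
//   }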
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Queue::presentKHR( const VULKAN_HPP_NAMESPACE::PresentInfoKHR & presentInfo ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkQueuePresentKHR && "Function <vkQueuePresentKHR> requires <VK_KHR_swapchain>" );
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkQueuePresentKHR( static_cast<VkQueue>( m_queue ), reinterpret_cast<const VkPresentInfoKHR *>( &presentInfo ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck(
result, VULKAN_HPP_NAMESPACE_STRING "::Queue::presentKHR", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } );
return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR Device::getGroupPresentCapabilitiesKHR() const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceGroupPresentCapabilitiesKHR &&
"Function <vkGetDeviceGroupPresentCapabilitiesKHR> requires <VK_KHR_device_group> or <VK_KHR_swapchain>" );
VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR deviceGroupPresentCapabilities;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetDeviceGroupPresentCapabilitiesKHR(
static_cast<VkDevice>( m_device ), reinterpret_cast<VkDeviceGroupPresentCapabilitiesKHR *>( &deviceGroupPresentCapabilities ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupPresentCapabilitiesKHR" );
return deviceGroupPresentCapabilities;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR
Device::getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceGroupSurfacePresentModesKHR &&
"Function <vkGetDeviceGroupSurfacePresentModesKHR> requires <VK_KHR_device_group> or <VK_KHR_swapchain>" );
VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetDeviceGroupSurfacePresentModesKHR(
static_cast<VkDevice>( m_device ), static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( &modes ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupSurfacePresentModesKHR" );
return modes;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::Rect2D>
PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDevicePresentRectanglesKHR &&
"Function <vkGetPhysicalDevicePresentRectanglesKHR> requires <VK_KHR_device_group> or <VK_KHR_swapchain>" );
std::vector<VULKAN_HPP_NAMESPACE::Rect2D> rects;
uint32_t rectCount;
VULKAN_HPP_NAMESPACE::Result result;
do
{
result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetPhysicalDevicePresentRectanglesKHR(
static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkSurfaceKHR>( surface ), &rectCount, nullptr ) );
if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && rectCount )
{
rects.resize( rectCount );
result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkGetPhysicalDevicePresentRectanglesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
static_cast<VkSurfaceKHR>( surface ),
&rectCount,
reinterpret_cast<VkRect2D *>( rects.data() ) ) );
}
} while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getPresentRectanglesKHR" );
VULKAN_HPP_ASSERT( rectCount <= rects.size() );
if ( rectCount < rects.size() )
{
rects.resize( rectCount );
}
return rects;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::pair<VULKAN_HPP_NAMESPACE::Result, uint32_t>
Device::acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR & acquireInfo ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkAcquireNextImage2KHR && "Function <vkAcquireNextImage2KHR> requires <VK_KHR_device_group> or <VK_KHR_swapchain>" );
uint32_t imageIndex;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkAcquireNextImage2KHR(
static_cast<VkDevice>( m_device ), reinterpret_cast<const VkAcquireNextImageInfoKHR *>( &acquireInfo ), &imageIndex ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result,
VULKAN_HPP_NAMESPACE_STRING "::Device::acquireNextImage2KHR",
{ VULKAN_HPP_NAMESPACE::Result::eSuccess,
VULKAN_HPP_NAMESPACE::Result::eTimeout,
VULKAN_HPP_NAMESPACE::Result::eNotReady,
VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } );
return std::make_pair( result, std::move( imageIndex ) );
}
//=== VK_KHR_display ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR> PhysicalDevice::getDisplayPropertiesKHR() const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceDisplayPropertiesKHR &&
"Function <vkGetPhysicalDeviceDisplayPropertiesKHR> requires <VK_KHR_display>" );
std::vector<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR> properties;
uint32_t propertyCount;
VULKAN_HPP_NAMESPACE::Result result;
do
{
result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkGetPhysicalDeviceDisplayPropertiesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), &propertyCount, nullptr ) );
if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount )
{
properties.resize( propertyCount );
result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetPhysicalDeviceDisplayPropertiesKHR(
static_cast<VkPhysicalDevice>( m_physicalDevice ), &propertyCount, reinterpret_cast<VkDisplayPropertiesKHR *>( properties.data() ) ) );
}
} while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPropertiesKHR" );
VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
if ( propertyCount < properties.size() )
{
properties.resize( propertyCount );
}
return properties;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR> PhysicalDevice::getDisplayPlanePropertiesKHR() const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceDisplayPlanePropertiesKHR &&
"Function <vkGetPhysicalDeviceDisplayPlanePropertiesKHR> requires <VK_KHR_display>" );
std::vector<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR> properties;
uint32_t propertyCount;
VULKAN_HPP_NAMESPACE::Result result;
do
{
result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkGetPhysicalDeviceDisplayPlanePropertiesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), &propertyCount, nullptr ) );
if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount )
{
properties.resize( propertyCount );
result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetPhysicalDeviceDisplayPlanePropertiesKHR(
static_cast<VkPhysicalDevice>( m_physicalDevice ), &propertyCount, reinterpret_cast<VkDisplayPlanePropertiesKHR *>( properties.data() ) ) );
}
} while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlanePropertiesKHR" );
VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
if ( propertyCount < properties.size() )
{
properties.resize( propertyCount );
}
return properties;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<std::vector<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayKHR>>::Type
PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex ) const
{
std::vector<VULKAN_HPP_NAMESPACE::DisplayKHR> displays;
uint32_t displayCount;
VULKAN_HPP_NAMESPACE::Result result;
do
{
result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkGetDisplayPlaneSupportedDisplaysKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), planeIndex, &displayCount, nullptr ) );
if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && displayCount )
{
displays.resize( displayCount );
result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetDisplayPlaneSupportedDisplaysKHR(
static_cast<VkPhysicalDevice>( m_physicalDevice ), planeIndex, &displayCount, reinterpret_cast<VkDisplayKHR *>( displays.data() ) ) );
}
} while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
if ( ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) && ( result != VULKAN_HPP_NAMESPACE::Result::eIncomplete ) )
{
# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
return VULKAN_HPP_UNEXPECTED( result );
# else
VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR" );
# endif
}
std::vector<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayKHR> displaysRAII;
displaysRAII.reserve( displays.size() );
for ( auto & display : displays )
{
displaysRAII.emplace_back( *this, *reinterpret_cast<VkDisplayKHR *>( &display ) );
}
return displaysRAII;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR> DisplayKHR::getModeProperties() const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetDisplayModePropertiesKHR && "Function <vkGetDisplayModePropertiesKHR> requires <VK_KHR_display>" );
std::vector<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR> properties;
uint32_t propertyCount;
VULKAN_HPP_NAMESPACE::Result result;
do
{
result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetDisplayModePropertiesKHR(
static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkDisplayKHR>( m_display ), &propertyCount, nullptr ) );
if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount )
{
properties.resize( propertyCount );
result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkGetDisplayModePropertiesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
static_cast<VkDisplayKHR>( m_display ),
&propertyCount,
reinterpret_cast<VkDisplayModePropertiesKHR *>( properties.data() ) ) );
}
} while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::DisplayKHR::getModeProperties" );
VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
if ( propertyCount < properties.size() )
{
properties.resize( propertyCount );
}
return properties;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayModeKHR>::Type
DisplayKHR::createMode( VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::DisplayModeKHR mode;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateDisplayModeKHR(
static_cast<VkPhysicalDevice>( m_physicalDevice ),
static_cast<VkDisplayKHR>( m_display ),
reinterpret_cast<const VkDisplayModeCreateInfoKHR *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkDisplayModeKHR *>( &mode ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
return VULKAN_HPP_UNEXPECTED( result );
# else
VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "DisplayKHR::createMode" );
# endif
}
return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayModeKHR( *this, *reinterpret_cast<VkDisplayModeKHR *>( &mode ) );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR
DisplayModeKHR::getDisplayPlaneCapabilities( uint32_t planeIndex ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetDisplayPlaneCapabilitiesKHR && "Function <vkGetDisplayPlaneCapabilitiesKHR> requires <VK_KHR_display>" );
VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR capabilities;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkGetDisplayPlaneCapabilitiesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
static_cast<VkDisplayModeKHR>( m_displayModeKHR ),
planeIndex,
reinterpret_cast<VkDisplayPlaneCapabilitiesKHR *>( &capabilities ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::DisplayModeKHR::getDisplayPlaneCapabilities" );
return capabilities;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR>::Type
Instance::createDisplayPlaneSurfaceKHR( VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateDisplayPlaneSurfaceKHR(
static_cast<VkInstance>( m_instance ),
reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
return VULKAN_HPP_UNEXPECTED( result );
# else
VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Instance::createDisplayPlaneSurfaceKHR" );
# endif
}
return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, *reinterpret_cast<VkSurfaceKHR *>( &surface ), allocator );
}
//=== VK_KHR_display_swapchain ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<
std::vector<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SwapchainKHR>>::Type
Device::createSharedSwapchainsKHR( VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
{
std::vector<VULKAN_HPP_NAMESPACE::SwapchainKHR> swapchains( createInfos.size() );
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateSharedSwapchainsKHR(
static_cast<VkDevice>( m_device ),
createInfos.size(),
reinterpret_cast<const VkSwapchainCreateInfoKHR *>( createInfos.data() ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkSwapchainKHR *>( swapchains.data() ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
return VULKAN_HPP_UNEXPECTED( result );
# else
VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createSharedSwapchainsKHR" );
# endif
}
std::vector<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SwapchainKHR> swapchainsRAII;
swapchainsRAII.reserve( swapchains.size() );
for ( auto & swapchain : swapchains )
{
swapchainsRAII.emplace_back( *this, *reinterpret_cast<VkSwapchainKHR *>( &swapchain ), allocator );
}
return swapchainsRAII;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SwapchainKHR>::Type
Device::createSharedSwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateSharedSwapchainsKHR(
static_cast<VkDevice>( m_device ),
1,
reinterpret_cast<const VkSwapchainCreateInfoKHR *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkSwapchainKHR *>( &swapchain ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
return VULKAN_HPP_UNEXPECTED( result );
# else
VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createSharedSwapchainKHR" );
# endif
}
return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SwapchainKHR( *this, *reinterpret_cast<VkSwapchainKHR *>( &swapchain ), allocator );
}
# if defined( VK_USE_PLATFORM_XLIB_KHR )
//=== VK_KHR_xlib_surface ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR>::Type
Instance::createXlibSurfaceKHR( VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateXlibSurfaceKHR(
static_cast<VkInstance>( m_instance ),
reinterpret_cast<const VkXlibSurfaceCreateInfoKHR *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
return VULKAN_HPP_UNEXPECTED( result );
# else
VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Instance::createXlibSurfaceKHR" );
# endif
}
return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, *reinterpret_cast<VkSurfaceKHR *>( &surface ), allocator );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32
PhysicalDevice::getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display & dpy, VisualID visualID ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceXlibPresentationSupportKHR &&
"Function <vkGetPhysicalDeviceXlibPresentationSupportKHR> requires <VK_KHR_xlib_surface>" );
VkBool32 result =
getDispatcher()->vkGetPhysicalDeviceXlibPresentationSupportKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), queueFamilyIndex, &dpy, visualID );
return static_cast<VULKAN_HPP_NAMESPACE::Bool32>( result );
}
# endif /*VK_USE_PLATFORM_XLIB_KHR*/
# if defined( VK_USE_PLATFORM_XCB_KHR )
//=== VK_KHR_xcb_surface ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR>::Type
Instance::createXcbSurfaceKHR( VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateXcbSurfaceKHR(
static_cast<VkInstance>( m_instance ),
reinterpret_cast<const VkXcbSurfaceCreateInfoKHR *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
return VULKAN_HPP_UNEXPECTED( result );
# else
VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Instance::createXcbSurfaceKHR" );
# endif
}
return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, *reinterpret_cast<VkSurfaceKHR *>( &surface ), allocator );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32 PhysicalDevice::getXcbPresentationSupportKHR(
uint32_t queueFamilyIndex, xcb_connection_t & connection, xcb_visualid_t visual_id ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceXcbPresentationSupportKHR &&
"Function <vkGetPhysicalDeviceXcbPresentationSupportKHR> requires <VK_KHR_xcb_surface>" );
VkBool32 result = getDispatcher()->vkGetPhysicalDeviceXcbPresentationSupportKHR(
static_cast<VkPhysicalDevice>( m_physicalDevice ), queueFamilyIndex, &connection, visual_id );
return static_cast<VULKAN_HPP_NAMESPACE::Bool32>( result );
}
# endif /*VK_USE_PLATFORM_XCB_KHR*/
# if defined( VK_USE_PLATFORM_WAYLAND_KHR )
//=== VK_KHR_wayland_surface ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR>::Type
Instance::createWaylandSurfaceKHR( VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateWaylandSurfaceKHR(
static_cast<VkInstance>( m_instance ),
reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
return VULKAN_HPP_UNEXPECTED( result );
# else
VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Instance::createWaylandSurfaceKHR" );
# endif
}
return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, *reinterpret_cast<VkSurfaceKHR *>( &surface ), allocator );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32
PhysicalDevice::getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, struct wl_display & display ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceWaylandPresentationSupportKHR &&
"Function <vkGetPhysicalDeviceWaylandPresentationSupportKHR> requires <VK_KHR_wayland_surface>" );
VkBool32 result =
getDispatcher()->vkGetPhysicalDeviceWaylandPresentationSupportKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), queueFamilyIndex, &display );
return static_cast<VULKAN_HPP_NAMESPACE::Bool32>( result );
}
# endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
# if defined( VK_USE_PLATFORM_ANDROID_KHR )
//=== VK_KHR_android_surface ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR>::Type
Instance::createAndroidSurfaceKHR( VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateAndroidSurfaceKHR(
static_cast<VkInstance>( m_instance ),
reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
return VULKAN_HPP_UNEXPECTED( result );
# else
VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Instance::createAndroidSurfaceKHR" );
# endif
}
return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, *reinterpret_cast<VkSurfaceKHR *>( &surface ), allocator );
}
# endif /*VK_USE_PLATFORM_ANDROID_KHR*/
# if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_KHR_win32_surface ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR>::Type
Instance::createWin32SurfaceKHR( VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateWin32SurfaceKHR(
static_cast<VkInstance>( m_instance ),
reinterpret_cast<const VkWin32SurfaceCreateInfoKHR *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
return VULKAN_HPP_UNEXPECTED( result );
# else
VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Instance::createWin32SurfaceKHR" );
# endif
}
return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, *reinterpret_cast<VkSurfaceKHR *>( &surface ), allocator );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32
PhysicalDevice::getWin32PresentationSupportKHR( uint32_t queueFamilyIndex ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceWin32PresentationSupportKHR &&
"Function <vkGetPhysicalDeviceWin32PresentationSupportKHR> requires <VK_KHR_win32_surface>" );
VkBool32 result = getDispatcher()->vkGetPhysicalDeviceWin32PresentationSupportKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), queueFamilyIndex );
return static_cast<VULKAN_HPP_NAMESPACE::Bool32>( result );
}
# endif /*VK_USE_PLATFORM_WIN32_KHR*/
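    // All platform surface factories above (Xlib, XCB, Wayland, Android, Win32) follow the same shape: fill the
    // platform-specific create info, call the Instance factory, and keep the returned vk::raii::SurfaceKHR alive for
    // as long as the surface is used. Illustrative Wayland sketch (assumes `display` is a wl_display* and
    // `surfaceHandle` a wl_surface*, both owned by the application):
    //
    //   if ( physicalDevice.getWaylandPresentationSupportKHR( queueFamilyIndex, *display ) )
    //   {
    //     vk::WaylandSurfaceCreateInfoKHR createInfo{ {}, display, surfaceHandle };
    //     vk::raii::SurfaceKHR surface = instance.createWaylandSurfaceKHR( createInfo );
    //   }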
//=== VK_EXT_debug_report ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DebugReportCallbackEXT>::Type
Instance::createDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateDebugReportCallbackEXT(
static_cast<VkInstance>( m_instance ),
reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkDebugReportCallbackEXT *>( &callback ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
return VULKAN_HPP_UNEXPECTED( result );
# else
VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Instance::createDebugReportCallbackEXT" );
# endif
}
return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DebugReportCallbackEXT(
*this, *reinterpret_cast<VkDebugReportCallbackEXT *>( &callback ), allocator );
}
VULKAN_HPP_INLINE void Instance::debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags,
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_,
uint64_t object,
size_t location,
int32_t messageCode,
const std::string & layerPrefix,
const std::string & message ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkDebugReportMessageEXT && "Function <vkDebugReportMessageEXT> requires <VK_EXT_debug_report>" );
getDispatcher()->vkDebugReportMessageEXT( static_cast<VkInstance>( m_instance ),
static_cast<VkDebugReportFlagsEXT>( flags ),
static_cast<VkDebugReportObjectTypeEXT>( objectType_ ),
object,
location,
messageCode,
layerPrefix.c_str(),
message.c_str() );
}
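    // Usage sketch for VK_EXT_debug_report (illustrative only; `debugCallback` is an application-provided
    // PFN_vkDebugReportCallbackEXT, and the default vk / vk::raii namespaces are assumed):
    //
    //   vk::DebugReportCallbackCreateInfoEXT createInfo{ vk::DebugReportFlagBitsEXT::eError | vk::DebugReportFlagBitsEXT::eWarning,
    //                                                    debugCallback };
    //   vk::raii::DebugReportCallbackEXT callback = instance.createDebugReportCallbackEXT( createInfo );
    //   // the callback is unregistered automatically when `callback` goes out of scope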
//=== VK_EXT_debug_marker ===
VULKAN_HPP_INLINE void Device::debugMarkerSetObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT & tagInfo ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkDebugMarkerSetObjectTagEXT && "Function <vkDebugMarkerSetObjectTagEXT> requires <VK_EXT_debug_marker>" );
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkDebugMarkerSetObjectTagEXT(
static_cast<VkDevice>( m_device ), reinterpret_cast<const VkDebugMarkerObjectTagInfoEXT *>( &tagInfo ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::debugMarkerSetObjectTagEXT" );
}
VULKAN_HPP_INLINE void Device::debugMarkerSetObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT & nameInfo ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkDebugMarkerSetObjectNameEXT && "Function <vkDebugMarkerSetObjectNameEXT> requires <VK_EXT_debug_marker>" );
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkDebugMarkerSetObjectNameEXT(
static_cast<VkDevice>( m_device ), reinterpret_cast<const VkDebugMarkerObjectNameInfoEXT *>( &nameInfo ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::debugMarkerSetObjectNameEXT" );
}
VULKAN_HPP_INLINE void CommandBuffer::debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT & markerInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDebugMarkerBeginEXT && "Function <vkCmdDebugMarkerBeginEXT> requires <VK_EXT_debug_marker>" );
getDispatcher()->vkCmdDebugMarkerBeginEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( &markerInfo ) );
}
VULKAN_HPP_INLINE void CommandBuffer::debugMarkerEndEXT() const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDebugMarkerEndEXT && "Function <vkCmdDebugMarkerEndEXT> requires <VK_EXT_debug_marker>" );
getDispatcher()->vkCmdDebugMarkerEndEXT( static_cast<VkCommandBuffer>( m_commandBuffer ) );
}
VULKAN_HPP_INLINE void CommandBuffer::debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT & markerInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDebugMarkerInsertEXT && "Function <vkCmdDebugMarkerInsertEXT> requires <VK_EXT_debug_marker>" );
getDispatcher()->vkCmdDebugMarkerInsertEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( &markerInfo ) );
}
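    // Usage sketch for VK_EXT_debug_marker (illustrative only; `commandBuffer` is a vk::raii::CommandBuffer in the
    // recording state):
    //
    //   commandBuffer.debugMarkerBeginEXT( vk::DebugMarkerMarkerInfoEXT{ "shadow pass" } );
    //   /* record the commands belonging to the marked region */
    //   commandBuffer.debugMarkerEndEXT();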
//=== VK_KHR_video_queue ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR
PhysicalDevice::getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR & videoProfile ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceVideoCapabilitiesKHR &&
"Function <vkGetPhysicalDeviceVideoCapabilitiesKHR> requires <VK_KHR_video_queue>" );
VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR capabilities;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkGetPhysicalDeviceVideoCapabilitiesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
reinterpret_cast<const VkVideoProfileInfoKHR *>( &videoProfile ),
reinterpret_cast<VkVideoCapabilitiesKHR *>( &capabilities ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoCapabilitiesKHR" );
return capabilities;
}
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
PhysicalDevice::getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR & videoProfile ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceVideoCapabilitiesKHR &&
"Function <vkGetPhysicalDeviceVideoCapabilitiesKHR> requires <VK_KHR_video_queue>" );
StructureChain<X, Y, Z...> structureChain;
VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR & capabilities = structureChain.template get<VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR>();
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkGetPhysicalDeviceVideoCapabilitiesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
reinterpret_cast<const VkVideoProfileInfoKHR *>( &videoProfile ),
reinterpret_cast<VkVideoCapabilitiesKHR *>( &capabilities ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoCapabilitiesKHR" );
return structureChain;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR>
PhysicalDevice::getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceVideoFormatPropertiesKHR &&
"Function <vkGetPhysicalDeviceVideoFormatPropertiesKHR> requires <VK_KHR_video_queue>" );
std::vector<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR> videoFormatProperties;
uint32_t videoFormatPropertyCount;
VULKAN_HPP_NAMESPACE::Result result;
do
{
result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkGetPhysicalDeviceVideoFormatPropertiesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( &videoFormatInfo ),
&videoFormatPropertyCount,
nullptr ) );
if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && videoFormatPropertyCount )
{
videoFormatProperties.resize( videoFormatPropertyCount );
result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkGetPhysicalDeviceVideoFormatPropertiesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( &videoFormatInfo ),
&videoFormatPropertyCount,
reinterpret_cast<VkVideoFormatPropertiesKHR *>( videoFormatProperties.data() ) ) );
}
} while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoFormatPropertiesKHR" );
VULKAN_HPP_ASSERT( videoFormatPropertyCount <= videoFormatProperties.size() );
if ( videoFormatPropertyCount < videoFormatProperties.size() )
{
videoFormatProperties.resize( videoFormatPropertyCount );
}
return videoFormatProperties;
}
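    // Usage sketch for the VK_KHR_video_queue queries above (illustrative only; the codec-specific profile setup is
    // an assumption and has to match the codec extension actually used):
    //
    //   vk::VideoProfileInfoKHR profile = /* codec operation, chroma subsampling, bit depths, codec pNext chain */;
    //   vk::VideoCapabilitiesKHR capabilities = physicalDevice.getVideoCapabilitiesKHR( profile );
    //   vk::VideoProfileListInfoKHR profileList{ profile };
    //   vk::PhysicalDeviceVideoFormatInfoKHR formatInfo{};
    //   formatInfo.imageUsage = vk::ImageUsageFlagBits::eVideoDecodeDpbKHR;
    //   formatInfo.pNext      = &profileList;
    //   auto formatProperties = physicalDevice.getVideoFormatPropertiesKHR( formatInfo );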
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::VideoSessionKHR>::Type
Device::createVideoSessionKHR( VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateVideoSessionKHR(
static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkVideoSessionCreateInfoKHR *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkVideoSessionKHR *>( &videoSession ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
return VULKAN_HPP_UNEXPECTED( result );
# else
VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createVideoSessionKHR" );
# endif
}
return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::VideoSessionKHR( *this, *reinterpret_cast<VkVideoSessionKHR *>( &videoSession ), allocator );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR> VideoSessionKHR::getMemoryRequirements() const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetVideoSessionMemoryRequirementsKHR &&
"Function <vkGetVideoSessionMemoryRequirementsKHR> requires <VK_KHR_video_queue>" );
std::vector<VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR> memoryRequirements;
uint32_t memoryRequirementsCount;
VULKAN_HPP_NAMESPACE::Result result;
do
{
result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetVideoSessionMemoryRequirementsKHR(
static_cast<VkDevice>( m_device ), static_cast<VkVideoSessionKHR>( m_videoSession ), &memoryRequirementsCount, nullptr ) );
if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && memoryRequirementsCount )
{
memoryRequirements.resize( memoryRequirementsCount );
result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkGetVideoSessionMemoryRequirementsKHR( static_cast<VkDevice>( m_device ),
static_cast<VkVideoSessionKHR>( m_videoSession ),
&memoryRequirementsCount,
reinterpret_cast<VkVideoSessionMemoryRequirementsKHR *>( memoryRequirements.data() ) ) );
}
} while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
VULKAN_HPP_ASSERT( memoryRequirementsCount <= memoryRequirements.size() );
if ( memoryRequirementsCount < memoryRequirements.size() )
{
memoryRequirements.resize( memoryRequirementsCount );
}
return memoryRequirements;
}
VULKAN_HPP_INLINE void VideoSessionKHR::bindMemory(
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindVideoSessionMemoryInfoKHR> const & bindSessionMemoryInfos ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkBindVideoSessionMemoryKHR && "Function <vkBindVideoSessionMemoryKHR> requires <VK_KHR_video_queue>" );
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkBindVideoSessionMemoryKHR( static_cast<VkDevice>( m_device ),
static_cast<VkVideoSessionKHR>( m_videoSession ),
bindSessionMemoryInfos.size(),
reinterpret_cast<const VkBindVideoSessionMemoryInfoKHR *>( bindSessionMemoryInfos.data() ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::VideoSessionKHR::bindMemory" );
}
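    // Usage sketch for binding memory to a video session (illustrative only; the actual memory allocation is elided
    // and `allocateMatchingMemory` is a hypothetical helper that picks a suitable memory type):
    //
    //   auto requirements = videoSession.getMemoryRequirements();
    //   std::vector<vk::raii::DeviceMemory> memories;
    //   std::vector<vk::BindVideoSessionMemoryInfoKHR> bindInfos;
    //   for ( auto const & req : requirements )
    //   {
    //     memories.push_back( allocateMatchingMemory( device, req.memoryRequirements ) );
    //     bindInfos.push_back( vk::BindVideoSessionMemoryInfoKHR{ req.memoryBindIndex, *memories.back(), 0, req.memoryRequirements.size } );
    //   }
    //   videoSession.bindMemory( bindInfos );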
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::VideoSessionParametersKHR>::Type
Device::createVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateVideoSessionParametersKHR(
static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkVideoSessionParametersCreateInfoKHR *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkVideoSessionParametersKHR *>( &videoSessionParameters ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
return VULKAN_HPP_UNEXPECTED( result );
# else
VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createVideoSessionParametersKHR" );
# endif
}
return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::VideoSessionParametersKHR(
*this, *reinterpret_cast<VkVideoSessionParametersKHR *>( &videoSessionParameters ), allocator );
}
VULKAN_HPP_INLINE void VideoSessionParametersKHR::update( const VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR & updateInfo ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkUpdateVideoSessionParametersKHR && "Function <vkUpdateVideoSessionParametersKHR> requires <VK_KHR_video_queue>" );
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkUpdateVideoSessionParametersKHR( static_cast<VkDevice>( m_device ),
static_cast<VkVideoSessionParametersKHR>( m_videoSessionParameters ),
reinterpret_cast<const VkVideoSessionParametersUpdateInfoKHR *>( &updateInfo ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::VideoSessionParametersKHR::update" );
}
VULKAN_HPP_INLINE void CommandBuffer::beginVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR & beginInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBeginVideoCodingKHR && "Function <vkCmdBeginVideoCodingKHR> requires <VK_KHR_video_queue>" );
getDispatcher()->vkCmdBeginVideoCodingKHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
reinterpret_cast<const VkVideoBeginCodingInfoKHR *>( &beginInfo ) );
}
VULKAN_HPP_INLINE void CommandBuffer::endVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR & endCodingInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdEndVideoCodingKHR && "Function <vkCmdEndVideoCodingKHR> requires <VK_KHR_video_queue>" );
getDispatcher()->vkCmdEndVideoCodingKHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
reinterpret_cast<const VkVideoEndCodingInfoKHR *>( &endCodingInfo ) );
}
VULKAN_HPP_INLINE void
CommandBuffer::controlVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR & codingControlInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdControlVideoCodingKHR && "Function <vkCmdControlVideoCodingKHR> requires <VK_KHR_video_queue>" );
getDispatcher()->vkCmdControlVideoCodingKHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
reinterpret_cast<const VkVideoCodingControlInfoKHR *>( &codingControlInfo ) );
}
//=== VK_KHR_video_decode_queue ===
VULKAN_HPP_INLINE void CommandBuffer::decodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR & decodeInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDecodeVideoKHR && "Function <vkCmdDecodeVideoKHR> requires <VK_KHR_video_decode_queue>" );
getDispatcher()->vkCmdDecodeVideoKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkVideoDecodeInfoKHR *>( &decodeInfo ) );
}
//=== VK_EXT_transform_feedback ===
VULKAN_HPP_INLINE void
CommandBuffer::bindTransformFeedbackBuffersEXT( uint32_t firstBinding,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindTransformFeedbackBuffersEXT &&
"Function <vkCmdBindTransformFeedbackBuffersEXT> requires <VK_EXT_transform_feedback>" );
# ifdef VULKAN_HPP_NO_EXCEPTIONS
VULKAN_HPP_ASSERT( buffers.size() == offsets.size() );
VULKAN_HPP_ASSERT( sizes.empty() || buffers.size() == sizes.size() );
# else
if ( buffers.size() != offsets.size() )
{
throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindTransformFeedbackBuffersEXT: buffers.size() != offsets.size()" );
}
if ( !sizes.empty() && buffers.size() != sizes.size() )
{
throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindTransformFeedbackBuffersEXT: buffers.size() != sizes.size()" );
}
# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
getDispatcher()->vkCmdBindTransformFeedbackBuffersEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
firstBinding,
buffers.size(),
reinterpret_cast<const VkBuffer *>( buffers.data() ),
reinterpret_cast<const VkDeviceSize *>( offsets.data() ),
reinterpret_cast<const VkDeviceSize *>( sizes.data() ) );
}
VULKAN_HPP_INLINE void
CommandBuffer::beginTransformFeedbackEXT( uint32_t firstCounterBuffer,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & counterBuffers,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & counterBufferOffsets ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBeginTransformFeedbackEXT && "Function <vkCmdBeginTransformFeedbackEXT> requires <VK_EXT_transform_feedback>" );
# ifdef VULKAN_HPP_NO_EXCEPTIONS
VULKAN_HPP_ASSERT( counterBufferOffsets.empty() || counterBuffers.size() == counterBufferOffsets.size() );
# else
if ( !counterBufferOffsets.empty() && counterBuffers.size() != counterBufferOffsets.size() )
{
throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::beginTransformFeedbackEXT: counterBuffers.size() != counterBufferOffsets.size()" );
}
# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
getDispatcher()->vkCmdBeginTransformFeedbackEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
firstCounterBuffer,
counterBuffers.size(),
reinterpret_cast<const VkBuffer *>( counterBuffers.data() ),
reinterpret_cast<const VkDeviceSize *>( counterBufferOffsets.data() ) );
}
VULKAN_HPP_INLINE void
CommandBuffer::endTransformFeedbackEXT( uint32_t firstCounterBuffer,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & counterBuffers,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & counterBufferOffsets ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdEndTransformFeedbackEXT && "Function <vkCmdEndTransformFeedbackEXT> requires <VK_EXT_transform_feedback>" );
# ifdef VULKAN_HPP_NO_EXCEPTIONS
VULKAN_HPP_ASSERT( counterBufferOffsets.empty() || counterBuffers.size() == counterBufferOffsets.size() );
# else
if ( !counterBufferOffsets.empty() && counterBuffers.size() != counterBufferOffsets.size() )
{
throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::endTransformFeedbackEXT: counterBuffers.size() != counterBufferOffsets.size()" );
}
# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
getDispatcher()->vkCmdEndTransformFeedbackEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
firstCounterBuffer,
counterBuffers.size(),
reinterpret_cast<const VkBuffer *>( counterBuffers.data() ),
reinterpret_cast<const VkDeviceSize *>( counterBufferOffsets.data() ) );
}
VULKAN_HPP_INLINE void CommandBuffer::beginQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
uint32_t query,
VULKAN_HPP_NAMESPACE::QueryControlFlags flags,
uint32_t index ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBeginQueryIndexedEXT && "Function <vkCmdBeginQueryIndexedEXT> requires <VK_EXT_transform_feedback>" );
getDispatcher()->vkCmdBeginQueryIndexedEXT(
static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkQueryPool>( queryPool ), query, static_cast<VkQueryControlFlags>( flags ), index );
}
VULKAN_HPP_INLINE void
CommandBuffer::endQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, uint32_t index ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdEndQueryIndexedEXT && "Function <vkCmdEndQueryIndexedEXT> requires <VK_EXT_transform_feedback>" );
getDispatcher()->vkCmdEndQueryIndexedEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkQueryPool>( queryPool ), query, index );
}
VULKAN_HPP_INLINE void CommandBuffer::drawIndirectByteCountEXT( uint32_t instanceCount,
uint32_t firstInstance,
VULKAN_HPP_NAMESPACE::Buffer counterBuffer,
VULKAN_HPP_NAMESPACE::DeviceSize counterBufferOffset,
uint32_t counterOffset,
uint32_t vertexStride ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawIndirectByteCountEXT && "Function <vkCmdDrawIndirectByteCountEXT> requires <VK_EXT_transform_feedback>" );
getDispatcher()->vkCmdDrawIndirectByteCountEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
instanceCount,
firstInstance,
static_cast<VkBuffer>( counterBuffer ),
static_cast<VkDeviceSize>( counterBufferOffset ),
counterOffset,
vertexStride );
}
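    // Usage sketch for VK_EXT_transform_feedback (illustrative only; `xfbBuffer` is a vk::raii::Buffer created with
    // eTransformFeedbackBufferEXT usage and `bufferSize` its size in bytes):
    //
    //   commandBuffer.bindTransformFeedbackBuffersEXT( 0, { *xfbBuffer }, { 0 }, { bufferSize } );
    //   commandBuffer.beginTransformFeedbackEXT( 0, nullptr, nullptr );
    //   commandBuffer.draw( vertexCount, 1, 0, 0 );
    //   commandBuffer.endTransformFeedbackEXT( 0, nullptr, nullptr );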
//=== VK_NVX_binary_import ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CuModuleNVX>::Type
Device::createCuModuleNVX( VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::CuModuleNVX module;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateCuModuleNVX(
static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkCuModuleCreateInfoNVX *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkCuModuleNVX *>( &module ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
return VULKAN_HPP_UNEXPECTED( result );
# else
VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createCuModuleNVX" );
# endif
}
return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CuModuleNVX( *this, *reinterpret_cast<VkCuModuleNVX *>( &module ), allocator );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CuFunctionNVX>::Type
Device::createCuFunctionNVX( VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::CuFunctionNVX function;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateCuFunctionNVX(
static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkCuFunctionCreateInfoNVX *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkCuFunctionNVX *>( &function ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
return VULKAN_HPP_UNEXPECTED( result );
# else
VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createCuFunctionNVX" );
# endif
}
return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CuFunctionNVX( *this, *reinterpret_cast<VkCuFunctionNVX *>( &function ), allocator );
}
VULKAN_HPP_INLINE void CommandBuffer::cuLaunchKernelNVX( const VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX & launchInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCuLaunchKernelNVX && "Function <vkCmdCuLaunchKernelNVX> requires <VK_NVX_binary_import>" );
getDispatcher()->vkCmdCuLaunchKernelNVX( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCuLaunchInfoNVX *>( &launchInfo ) );
}
//=== VK_NVX_image_view_handle ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint32_t
Device::getImageViewHandleNVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX & info ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageViewHandleNVX && "Function <vkGetImageViewHandleNVX> requires <VK_NVX_image_view_handle>" );
uint32_t result =
getDispatcher()->vkGetImageViewHandleNVX( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkImageViewHandleInfoNVX *>( &info ) );
return result;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX ImageView::getAddressNVX() const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageViewAddressNVX && "Function <vkGetImageViewAddressNVX> requires <VK_NVX_image_view_handle>" );
VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX properties;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetImageViewAddressNVX(
static_cast<VkDevice>( m_device ), static_cast<VkImageView>( m_imageView ), reinterpret_cast<VkImageViewAddressPropertiesNVX *>( &properties ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::ImageView::getAddressNVX" );
return properties;
}
//=== VK_AMD_draw_indirect_count ===
VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer,
VULKAN_HPP_NAMESPACE::DeviceSize offset,
VULKAN_HPP_NAMESPACE::Buffer countBuffer,
VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
uint32_t maxDrawCount,
uint32_t stride ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawIndirectCountAMD &&
"Function <vkCmdDrawIndirectCountAMD> requires <VK_AMD_draw_indirect_count> or <VK_KHR_draw_indirect_count> or <VK_VERSION_1_2>" );
getDispatcher()->vkCmdDrawIndirectCountAMD( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkBuffer>( buffer ),
static_cast<VkDeviceSize>( offset ),
static_cast<VkBuffer>( countBuffer ),
static_cast<VkDeviceSize>( countBufferOffset ),
maxDrawCount,
stride );
}
VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer,
VULKAN_HPP_NAMESPACE::DeviceSize offset,
VULKAN_HPP_NAMESPACE::Buffer countBuffer,
VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
uint32_t maxDrawCount,
uint32_t stride ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT(
getDispatcher()->vkCmdDrawIndexedIndirectCountAMD &&
"Function <vkCmdDrawIndexedIndirectCountAMD> requires <VK_AMD_draw_indirect_count> or <VK_KHR_draw_indirect_count> or <VK_VERSION_1_2>" );
getDispatcher()->vkCmdDrawIndexedIndirectCountAMD( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkBuffer>( buffer ),
static_cast<VkDeviceSize>( offset ),
static_cast<VkBuffer>( countBuffer ),
static_cast<VkDeviceSize>( countBufferOffset ),
maxDrawCount,
stride );
}
//=== VK_AMD_shader_info ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<uint8_t> Pipeline::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage,
VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetShaderInfoAMD && "Function <vkGetShaderInfoAMD> requires <VK_AMD_shader_info>" );
std::vector<uint8_t> info;
size_t infoSize;
VULKAN_HPP_NAMESPACE::Result result;
do
{
result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetShaderInfoAMD( static_cast<VkDevice>( m_device ),
static_cast<VkPipeline>( m_pipeline ),
static_cast<VkShaderStageFlagBits>( shaderStage ),
static_cast<VkShaderInfoTypeAMD>( infoType ),
&infoSize,
nullptr ) );
if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && infoSize )
{
info.resize( infoSize );
result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetShaderInfoAMD( static_cast<VkDevice>( m_device ),
static_cast<VkPipeline>( m_pipeline ),
static_cast<VkShaderStageFlagBits>( shaderStage ),
static_cast<VkShaderInfoTypeAMD>( infoType ),
&infoSize,
reinterpret_cast<void *>( info.data() ) ) );
}
} while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getShaderInfoAMD" );
VULKAN_HPP_ASSERT( infoSize <= info.size() );
if ( infoSize < info.size() )
{
info.resize( infoSize );
}
return info;
}
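    // Usage sketch for VK_AMD_shader_info (illustrative only; requires a pipeline created on a driver that exposes
    // the extension):
    //
    //   std::vector<uint8_t> disassembly = pipeline.getShaderInfoAMD( vk::ShaderStageFlagBits::eFragment,
    //                                                                 vk::ShaderInfoTypeAMD::eDisassembly );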
//=== VK_KHR_dynamic_rendering ===
VULKAN_HPP_INLINE void CommandBuffer::beginRenderingKHR( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBeginRenderingKHR &&
"Function <vkCmdBeginRenderingKHR> requires <VK_KHR_dynamic_rendering> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdBeginRenderingKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkRenderingInfo *>( &renderingInfo ) );
}
VULKAN_HPP_INLINE void CommandBuffer::endRenderingKHR() const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdEndRenderingKHR && "Function <vkCmdEndRenderingKHR> requires <VK_KHR_dynamic_rendering> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdEndRenderingKHR( static_cast<VkCommandBuffer>( m_commandBuffer ) );
}
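    // Usage sketch for VK_KHR_dynamic_rendering (illustrative only; `swapchainImageView` is a vk::raii::ImageView,
    // `extent` the render area extent, and the image is assumed to already be in eColorAttachmentOptimal layout):
    //
    //   vk::RenderingAttachmentInfo color{};
    //   color.imageView   = *swapchainImageView;
    //   color.imageLayout = vk::ImageLayout::eColorAttachmentOptimal;
    //   color.loadOp      = vk::AttachmentLoadOp::eClear;
    //   color.storeOp     = vk::AttachmentStoreOp::eStore;
    //   vk::RenderingInfo renderingInfo{};
    //   renderingInfo.renderArea           = vk::Rect2D{ { 0, 0 }, extent };
    //   renderingInfo.layerCount           = 1;
    //   renderingInfo.colorAttachmentCount = 1;
    //   renderingInfo.pColorAttachments    = &color;
    //   commandBuffer.beginRenderingKHR( renderingInfo );
    //   /* record draws */
    //   commandBuffer.endRenderingKHR();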
# if defined( VK_USE_PLATFORM_GGP )
//=== VK_GGP_stream_descriptor_surface ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR>::Type
Instance::createStreamDescriptorSurfaceGGP( VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateStreamDescriptorSurfaceGGP(
static_cast<VkInstance>( m_instance ),
reinterpret_cast<const VkStreamDescriptorSurfaceCreateInfoGGP *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
return VULKAN_HPP_UNEXPECTED( result );
# else
VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Instance::createStreamDescriptorSurfaceGGP" );
# endif
}
return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, *reinterpret_cast<VkSurfaceKHR *>( &surface ), allocator );
}
# endif /*VK_USE_PLATFORM_GGP*/
//=== VK_NV_external_memory_capabilities ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV
PhysicalDevice::getExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::Format format,
VULKAN_HPP_NAMESPACE::ImageType type,
VULKAN_HPP_NAMESPACE::ImageTiling tiling,
VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,
VULKAN_HPP_NAMESPACE::ImageCreateFlags flags,
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceExternalImageFormatPropertiesNV &&
"Function <vkGetPhysicalDeviceExternalImageFormatPropertiesNV> requires <VK_NV_external_memory_capabilities>" );
VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV externalImageFormatProperties;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetPhysicalDeviceExternalImageFormatPropertiesNV(
static_cast<VkPhysicalDevice>( m_physicalDevice ),
static_cast<VkFormat>( format ),
static_cast<VkImageType>( type ),
static_cast<VkImageTiling>( tiling ),
static_cast<VkImageUsageFlags>( usage ),
static_cast<VkImageCreateFlags>( flags ),
static_cast<VkExternalMemoryHandleTypeFlagsNV>( externalHandleType ),
reinterpret_cast<VkExternalImageFormatPropertiesNV *>( &externalImageFormatProperties ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getExternalImageFormatPropertiesNV" );
return externalImageFormatProperties;
}
# if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_NV_external_memory_win32 ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE HANDLE DeviceMemory::getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetMemoryWin32HandleNV && "Function <vkGetMemoryWin32HandleNV> requires <VK_NV_external_memory_win32>" );
HANDLE handle;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetMemoryWin32HandleNV(
static_cast<VkDevice>( m_device ), static_cast<VkDeviceMemory>( m_memory ), static_cast<VkExternalMemoryHandleTypeFlagsNV>( handleType ), &handle ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::DeviceMemory::getMemoryWin32HandleNV" );
return handle;
}
# endif /*VK_USE_PLATFORM_WIN32_KHR*/
//=== VK_KHR_get_physical_device_properties2 ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 PhysicalDevice::getFeatures2KHR() const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceFeatures2KHR &&
"Function <vkGetPhysicalDeviceFeatures2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 features;
getDispatcher()->vkGetPhysicalDeviceFeatures2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) );
return features;
}
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> PhysicalDevice::getFeatures2KHR() const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceFeatures2KHR &&
"Function <vkGetPhysicalDeviceFeatures2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 & features = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2>();
getDispatcher()->vkGetPhysicalDeviceFeatures2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) );
return structureChain;
}
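    // Usage sketch for the StructureChain overload of getFeatures2KHR (illustrative only; any feature struct that is
    // chainable to PhysicalDeviceFeatures2 can be queried this way):
    //
    //   auto chain = physicalDevice.getFeatures2KHR<vk::PhysicalDeviceFeatures2, vk::PhysicalDeviceMultiviewFeatures>();
    //   vk::Bool32 multiview = chain.get<vk::PhysicalDeviceMultiviewFeatures>().multiview;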
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 PhysicalDevice::getProperties2KHR() const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceProperties2KHR &&
"Function <vkGetPhysicalDeviceProperties2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 properties;
getDispatcher()->vkGetPhysicalDeviceProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) );
return properties;
}
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> PhysicalDevice::getProperties2KHR() const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceProperties2KHR &&
"Function <vkGetPhysicalDeviceProperties2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 & properties = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2>();
getDispatcher()->vkGetPhysicalDeviceProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) );
return structureChain;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties2
PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceFormatProperties2KHR &&
"Function <vkGetPhysicalDeviceFormatProperties2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
VULKAN_HPP_NAMESPACE::FormatProperties2 formatProperties;
getDispatcher()->vkGetPhysicalDeviceFormatProperties2KHR(
static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) );
return formatProperties;
}
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceFormatProperties2KHR &&
"Function <vkGetPhysicalDeviceFormatProperties2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
VULKAN_HPP_NAMESPACE::FormatProperties2 & formatProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::FormatProperties2>();
getDispatcher()->vkGetPhysicalDeviceFormatProperties2KHR(
static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) );
return structureChain;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ImageFormatProperties2
PhysicalDevice::getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceImageFormatProperties2KHR &&
"Function <vkGetPhysicalDeviceImageFormatProperties2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
VULKAN_HPP_NAMESPACE::ImageFormatProperties2 imageFormatProperties;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkGetPhysicalDeviceImageFormatProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ),
reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2KHR" );
return imageFormatProperties;
}
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
PhysicalDevice::getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceImageFormatProperties2KHR &&
"Function <vkGetPhysicalDeviceImageFormatProperties2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
StructureChain<X, Y, Z...> structureChain;
VULKAN_HPP_NAMESPACE::ImageFormatProperties2 & imageFormatProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>();
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkGetPhysicalDeviceImageFormatProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ),
reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2KHR" );
return structureChain;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> PhysicalDevice::getQueueFamilyProperties2KHR() const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2KHR &&
"Function <vkGetPhysicalDeviceQueueFamilyProperties2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties;
uint32_t queueFamilyPropertyCount;
getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), &queueFamilyPropertyCount, nullptr );
queueFamilyProperties.resize( queueFamilyPropertyCount );
getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
&queueFamilyPropertyCount,
reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
{
queueFamilyProperties.resize( queueFamilyPropertyCount );
}
return queueFamilyProperties;
}
template <typename StructureChain>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<StructureChain> PhysicalDevice::getQueueFamilyProperties2KHR() const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2KHR &&
"Function <vkGetPhysicalDeviceQueueFamilyProperties2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
std::vector<StructureChain> structureChains;
std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties;
uint32_t queueFamilyPropertyCount;
getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), &queueFamilyPropertyCount, nullptr );
structureChains.resize( queueFamilyPropertyCount );
queueFamilyProperties.resize( queueFamilyPropertyCount );
for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
{
queueFamilyProperties[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext;
}
getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
&queueFamilyPropertyCount,
reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
{
structureChains.resize( queueFamilyPropertyCount );
}
for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
{
structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = queueFamilyProperties[i];
}
return structureChains;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2
PhysicalDevice::getMemoryProperties2KHR() const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceMemoryProperties2KHR &&
"Function <vkGetPhysicalDeviceMemoryProperties2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 memoryProperties;
getDispatcher()->vkGetPhysicalDeviceMemoryProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) );
return memoryProperties;
}
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> PhysicalDevice::getMemoryProperties2KHR() const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceMemoryProperties2KHR &&
"Function <vkGetPhysicalDeviceMemoryProperties2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 & memoryProperties =
structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2>();
getDispatcher()->vkGetPhysicalDeviceMemoryProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) );
return structureChain;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2>
PhysicalDevice::getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo ) const
{
VULKAN_HPP_ASSERT(
getDispatcher()->vkGetPhysicalDeviceSparseImageFormatProperties2KHR &&
"Function <vkGetPhysicalDeviceSparseImageFormatProperties2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2> properties;
uint32_t propertyCount;
getDispatcher()->vkGetPhysicalDeviceSparseImageFormatProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ),
&propertyCount,
nullptr );
properties.resize( propertyCount );
getDispatcher()->vkGetPhysicalDeviceSparseImageFormatProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ),
&propertyCount,
reinterpret_cast<VkSparseImageFormatProperties2 *>( properties.data() ) );
VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
if ( propertyCount < properties.size() )
{
properties.resize( propertyCount );
}
return properties;
}
//=== VK_KHR_device_group ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags
Device::getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceGroupPeerMemoryFeaturesKHR &&
"Function <vkGetDeviceGroupPeerMemoryFeaturesKHR> requires <VK_KHR_device_group> or <VK_VERSION_1_1>" );
VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags peerMemoryFeatures;
getDispatcher()->vkGetDeviceGroupPeerMemoryFeaturesKHR( static_cast<VkDevice>( m_device ),
heapIndex,
localDeviceIndex,
remoteDeviceIndex,
reinterpret_cast<VkPeerMemoryFeatureFlags *>( &peerMemoryFeatures ) );
return peerMemoryFeatures;
}
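    // Illustrative usage sketch (comment only): for a vk::raii::Device named device that was created
    // from a multi-GPU device group, the peer memory features between two group members can be
    // queried as, e.g.,
    //   vk::PeerMemoryFeatureFlags features =
    //     device.getGroupPeerMemoryFeaturesKHR( /*heapIndex*/ 0, /*localDeviceIndex*/ 0, /*remoteDeviceIndex*/ 1 );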
VULKAN_HPP_INLINE void CommandBuffer::setDeviceMaskKHR( uint32_t deviceMask ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDeviceMaskKHR && "Function <vkCmdSetDeviceMaskKHR> requires <VK_KHR_device_group> or <VK_VERSION_1_1>" );
getDispatcher()->vkCmdSetDeviceMaskKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), deviceMask );
}
VULKAN_HPP_INLINE void CommandBuffer::dispatchBaseKHR( uint32_t baseGroupX,
uint32_t baseGroupY,
uint32_t baseGroupZ,
uint32_t groupCountX,
uint32_t groupCountY,
uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDispatchBaseKHR && "Function <vkCmdDispatchBaseKHR> requires <VK_KHR_device_group> or <VK_VERSION_1_1>" );
getDispatcher()->vkCmdDispatchBaseKHR(
static_cast<VkCommandBuffer>( m_commandBuffer ), baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ );
}
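    // Illustrative usage sketch (comment only): assuming a recording vk::raii::CommandBuffer named
    // commandBuffer with a compute pipeline bound, a device-group dispatch whose workgroup IDs start
    // at a non-zero base could be recorded as
    //   commandBuffer.setDeviceMaskKHR( 0x1 );
    //   commandBuffer.dispatchBaseKHR( /*baseGroup*/ 8, 0, 0, /*groupCount*/ 8, 1, 1 );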
# if defined( VK_USE_PLATFORM_VI_NN )
//=== VK_NN_vi_surface ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR>::Type
Instance::createViSurfaceNN( VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateViSurfaceNN(
static_cast<VkInstance>( m_instance ),
reinterpret_cast<const VkViSurfaceCreateInfoNN *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
return VULKAN_HPP_UNEXPECTED( result );
# else
VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Instance::createViSurfaceNN" );
# endif
}
return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, *reinterpret_cast<VkSurfaceKHR *>( &surface ), allocator );
}
# endif /*VK_USE_PLATFORM_VI_NN*/
//=== VK_KHR_maintenance1 ===
VULKAN_HPP_INLINE void CommandPool::trimKHR( VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkTrimCommandPoolKHR && "Function <vkTrimCommandPoolKHR> requires <VK_KHR_maintenance1> or <VK_VERSION_1_1>" );
getDispatcher()->vkTrimCommandPoolKHR(
static_cast<VkDevice>( m_device ), static_cast<VkCommandPool>( m_commandPool ), static_cast<VkCommandPoolTrimFlags>( flags ) );
}
//=== VK_KHR_device_group_creation ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties> Instance::enumeratePhysicalDeviceGroupsKHR() const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkEnumeratePhysicalDeviceGroupsKHR &&
"Function <vkEnumeratePhysicalDeviceGroupsKHR> requires <VK_KHR_device_group_creation> or <VK_VERSION_1_1>" );
std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties> physicalDeviceGroupProperties;
uint32_t physicalDeviceGroupCount;
VULKAN_HPP_NAMESPACE::Result result;
do
{
result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkEnumeratePhysicalDeviceGroupsKHR( static_cast<VkInstance>( m_instance ), &physicalDeviceGroupCount, nullptr ) );
if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && physicalDeviceGroupCount )
{
physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkEnumeratePhysicalDeviceGroupsKHR(
static_cast<VkInstance>( m_instance ),
&physicalDeviceGroupCount,
reinterpret_cast<VkPhysicalDeviceGroupProperties *>( physicalDeviceGroupProperties.data() ) ) );
}
} while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroupsKHR" );
VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() );
if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() )
{
physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
}
return physicalDeviceGroupProperties;
}
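    // Illustrative usage sketch (comment only): with an existing vk::raii::Instance named instance,
    // the device groups exposed by the implementation can be listed as
    //   std::vector<vk::PhysicalDeviceGroupProperties> groups = instance.enumeratePhysicalDeviceGroupsKHR();
    // each element reporting physicalDeviceCount and the physical devices belonging to that group.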
//=== VK_KHR_external_memory_capabilities ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalBufferProperties PhysicalDevice::getExternalBufferPropertiesKHR(
const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo & externalBufferInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceExternalBufferPropertiesKHR &&
"Function <vkGetPhysicalDeviceExternalBufferPropertiesKHR> requires <VK_KHR_external_memory_capabilities> or <VK_VERSION_1_1>" );
VULKAN_HPP_NAMESPACE::ExternalBufferProperties externalBufferProperties;
getDispatcher()->vkGetPhysicalDeviceExternalBufferPropertiesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( &externalBufferInfo ),
reinterpret_cast<VkExternalBufferProperties *>( &externalBufferProperties ) );
return externalBufferProperties;
}
# if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_KHR_external_memory_win32 ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE HANDLE
Device::getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR & getWin32HandleInfo ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetMemoryWin32HandleKHR && "Function <vkGetMemoryWin32HandleKHR> requires <VK_KHR_external_memory_win32>" );
HANDLE handle;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetMemoryWin32HandleKHR(
static_cast<VkDevice>( m_device ), reinterpret_cast<const VkMemoryGetWin32HandleInfoKHR *>( &getWin32HandleInfo ), &handle ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandleKHR" );
return handle;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR
Device::getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetMemoryWin32HandlePropertiesKHR &&
"Function <vkGetMemoryWin32HandlePropertiesKHR> requires <VK_KHR_external_memory_win32>" );
VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR memoryWin32HandleProperties;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkGetMemoryWin32HandlePropertiesKHR( static_cast<VkDevice>( m_device ),
static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
handle,
reinterpret_cast<VkMemoryWin32HandlePropertiesKHR *>( &memoryWin32HandleProperties ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandlePropertiesKHR" );
return memoryWin32HandleProperties;
}
# endif /*VK_USE_PLATFORM_WIN32_KHR*/
//=== VK_KHR_external_memory_fd ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE int Device::getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR & getFdInfo ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetMemoryFdKHR && "Function <vkGetMemoryFdKHR> requires <VK_KHR_external_memory_fd>" );
int fd;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkGetMemoryFdKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkMemoryGetFdInfoKHR *>( &getFdInfo ), &fd ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryFdKHR" );
return fd;
}
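    // Illustrative usage sketch (comment only): assuming a vk::raii::Device named device and a
    // vk::raii::DeviceMemory named memory that was allocated as exportable, an opaque POSIX file
    // descriptor for the allocation can be obtained as
    //   vk::MemoryGetFdInfoKHR getFdInfo( *memory, vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd );
    //   int fd = device.getMemoryFdKHR( getFdInfo );
    // ownership of the returned fd passes to the caller.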
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR
Device::getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, int fd ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetMemoryFdPropertiesKHR && "Function <vkGetMemoryFdPropertiesKHR> requires <VK_KHR_external_memory_fd>" );
VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR memoryFdProperties;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkGetMemoryFdPropertiesKHR( static_cast<VkDevice>( m_device ),
static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
fd,
reinterpret_cast<VkMemoryFdPropertiesKHR *>( &memoryFdProperties ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryFdPropertiesKHR" );
return memoryFdProperties;
}
//=== VK_KHR_external_semaphore_capabilities ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties PhysicalDevice::getExternalSemaphorePropertiesKHR(
const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceExternalSemaphorePropertiesKHR &&
"Function <vkGetPhysicalDeviceExternalSemaphorePropertiesKHR> requires <VK_KHR_external_semaphore_capabilities> or <VK_VERSION_1_1>" );
VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties externalSemaphoreProperties;
getDispatcher()->vkGetPhysicalDeviceExternalSemaphorePropertiesKHR(
static_cast<VkPhysicalDevice>( m_physicalDevice ),
reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( &externalSemaphoreInfo ),
reinterpret_cast<VkExternalSemaphoreProperties *>( &externalSemaphoreProperties ) );
return externalSemaphoreProperties;
}
# if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_KHR_external_semaphore_win32 ===
VULKAN_HPP_INLINE void
Device::importSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkImportSemaphoreWin32HandleKHR &&
"Function <vkImportSemaphoreWin32HandleKHR> requires <VK_KHR_external_semaphore_win32>" );
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkImportSemaphoreWin32HandleKHR(
static_cast<VkDevice>( m_device ), reinterpret_cast<const VkImportSemaphoreWin32HandleInfoKHR *>( &importSemaphoreWin32HandleInfo ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreWin32HandleKHR" );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE HANDLE
Device::getSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetSemaphoreWin32HandleKHR &&
"Function <vkGetSemaphoreWin32HandleKHR> requires <VK_KHR_external_semaphore_win32>" );
HANDLE handle;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetSemaphoreWin32HandleKHR(
static_cast<VkDevice>( m_device ), reinterpret_cast<const VkSemaphoreGetWin32HandleInfoKHR *>( &getWin32HandleInfo ), &handle ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreWin32HandleKHR" );
return handle;
}
# endif /*VK_USE_PLATFORM_WIN32_KHR*/
//=== VK_KHR_external_semaphore_fd ===
VULKAN_HPP_INLINE void Device::importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkImportSemaphoreFdKHR && "Function <vkImportSemaphoreFdKHR> requires <VK_KHR_external_semaphore_fd>" );
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkImportSemaphoreFdKHR(
static_cast<VkDevice>( m_device ), reinterpret_cast<const VkImportSemaphoreFdInfoKHR *>( &importSemaphoreFdInfo ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreFdKHR" );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE int Device::getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR & getFdInfo ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetSemaphoreFdKHR && "Function <vkGetSemaphoreFdKHR> requires <VK_KHR_external_semaphore_fd>" );
int fd;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkGetSemaphoreFdKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkSemaphoreGetFdInfoKHR *>( &getFdInfo ), &fd ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreFdKHR" );
return fd;
}
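    // Illustrative usage sketch (comment only): assuming a vk::raii::Device named device and an
    // exportable vk::raii::Semaphore named semaphore, a typical export/import round trip is
    //   int fd = device.getSemaphoreFdKHR(
    //     vk::SemaphoreGetFdInfoKHR( *semaphore, vk::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd ) );
    //   device.importSemaphoreFdKHR( vk::ImportSemaphoreFdInfoKHR()
    //                                  .setSemaphore( *otherSemaphore )
    //                                  .setHandleType( vk::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd )
    //                                  .setFd( fd ) );
    // where otherSemaphore is another semaphore created for import.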
//=== VK_KHR_push_descriptor ===
VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetKHR(
VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
VULKAN_HPP_NAMESPACE::PipelineLayout layout,
uint32_t set,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const & descriptorWrites ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdPushDescriptorSetKHR && "Function <vkCmdPushDescriptorSetKHR> requires <VK_KHR_push_descriptor>" );
getDispatcher()->vkCmdPushDescriptorSetKHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkPipelineBindPoint>( pipelineBindPoint ),
static_cast<VkPipelineLayout>( layout ),
set,
descriptorWrites.size(),
reinterpret_cast<const VkWriteDescriptorSet *>( descriptorWrites.data() ) );
}
template <typename DataType>
VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
VULKAN_HPP_NAMESPACE::PipelineLayout layout,
uint32_t set,
DataType const & data ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdPushDescriptorSetWithTemplateKHR &&
"Function <vkCmdPushDescriptorSetWithTemplateKHR> requires <VK_KHR_descriptor_update_template> or <VK_KHR_push_descriptor>" );
getDispatcher()->vkCmdPushDescriptorSetWithTemplateKHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
static_cast<VkPipelineLayout>( layout ),
set,
reinterpret_cast<const void *>( &data ) );
}
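    // Illustrative usage sketch (comment only): assuming a recording vk::raii::CommandBuffer named
    // commandBuffer, a vk::raii::PipelineLayout named pipelineLayout whose set 0 layout was created
    // with vk::DescriptorSetLayoutCreateFlagBits::ePushDescriptorKHR, and a filled-in
    // vk::WriteDescriptorSet named write, a descriptor can be pushed without allocating a set:
    //   commandBuffer.pushDescriptorSetKHR( vk::PipelineBindPoint::eGraphics, *pipelineLayout, 0, write );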
//=== VK_EXT_conditional_rendering ===
VULKAN_HPP_INLINE void CommandBuffer::beginConditionalRenderingEXT(
const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT & conditionalRenderingBegin ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBeginConditionalRenderingEXT &&
"Function <vkCmdBeginConditionalRenderingEXT> requires <VK_EXT_conditional_rendering>" );
getDispatcher()->vkCmdBeginConditionalRenderingEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
reinterpret_cast<const VkConditionalRenderingBeginInfoEXT *>( &conditionalRenderingBegin ) );
}
VULKAN_HPP_INLINE void CommandBuffer::endConditionalRenderingEXT() const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdEndConditionalRenderingEXT &&
"Function <vkCmdEndConditionalRenderingEXT> requires <VK_EXT_conditional_rendering>" );
getDispatcher()->vkCmdEndConditionalRenderingEXT( static_cast<VkCommandBuffer>( m_commandBuffer ) );
}
//=== VK_KHR_descriptor_update_template ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DescriptorUpdateTemplate>::Type
Device::createDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateDescriptorUpdateTemplateKHR(
static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkDescriptorUpdateTemplate *>( &descriptorUpdateTemplate ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
return VULKAN_HPP_UNEXPECTED( result );
# else
VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createDescriptorUpdateTemplateKHR" );
# endif
}
return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DescriptorUpdateTemplate(
*this, *reinterpret_cast<VkDescriptorUpdateTemplate *>( &descriptorUpdateTemplate ), allocator );
}
VULKAN_HPP_INLINE void
Device::destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkDestroyDescriptorUpdateTemplateKHR &&
"Function <vkDestroyDescriptorUpdateTemplateKHR> requires <VK_KHR_descriptor_update_template> or <VK_VERSION_1_1>" );
getDispatcher()->vkDestroyDescriptorUpdateTemplateKHR(
static_cast<VkDevice>( m_device ),
static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
template <typename DataType>
VULKAN_HPP_INLINE void DescriptorSet::updateWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
DataType const & data ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkUpdateDescriptorSetWithTemplateKHR &&
"Function <vkUpdateDescriptorSetWithTemplateKHR> requires <VK_KHR_descriptor_update_template> or <VK_VERSION_1_1>" );
getDispatcher()->vkUpdateDescriptorSetWithTemplateKHR( static_cast<VkDevice>( m_device ),
static_cast<VkDescriptorSet>( m_descriptorSet ),
static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
reinterpret_cast<const void *>( &data ) );
}
//=== VK_NV_clip_space_w_scaling ===
VULKAN_HPP_INLINE void CommandBuffer::setViewportWScalingNV(
uint32_t firstViewport,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ViewportWScalingNV> const & viewportWScalings ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetViewportWScalingNV && "Function <vkCmdSetViewportWScalingNV> requires <VK_NV_clip_space_w_scaling>" );
getDispatcher()->vkCmdSetViewportWScalingNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
firstViewport,
viewportWScalings.size(),
reinterpret_cast<const VkViewportWScalingNV *>( viewportWScalings.data() ) );
}
# if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT )
//=== VK_EXT_acquire_xlib_display ===
VULKAN_HPP_INLINE void PhysicalDevice::acquireXlibDisplayEXT( Display & dpy, VULKAN_HPP_NAMESPACE::DisplayKHR display ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkAcquireXlibDisplayEXT && "Function <vkAcquireXlibDisplayEXT> requires <VK_EXT_acquire_xlib_display>" );
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkAcquireXlibDisplayEXT( static_cast<VkPhysicalDevice>( m_physicalDevice ), &dpy, static_cast<VkDisplayKHR>( display ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireXlibDisplayEXT" );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayKHR>::Type
PhysicalDevice::getRandROutputDisplayEXT( Display & dpy, RROutput rrOutput ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::DisplayKHR display;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetRandROutputDisplayEXT(
static_cast<VkPhysicalDevice>( m_physicalDevice ), &dpy, rrOutput, reinterpret_cast<VkDisplayKHR *>( &display ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
return VULKAN_HPP_UNEXPECTED( result );
# else
VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "PhysicalDevice::getRandROutputDisplayEXT" );
# endif
}
return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayKHR( *this, *reinterpret_cast<VkDisplayKHR *>( &display ) );
}
# endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
//=== VK_EXT_display_surface_counter ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT
PhysicalDevice::getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSurfaceCapabilities2EXT &&
"Function <vkGetPhysicalDeviceSurfaceCapabilities2EXT> requires <VK_EXT_display_surface_counter>" );
VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT surfaceCapabilities;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkGetPhysicalDeviceSurfaceCapabilities2EXT( static_cast<VkPhysicalDevice>( m_physicalDevice ),
static_cast<VkSurfaceKHR>( surface ),
reinterpret_cast<VkSurfaceCapabilities2EXT *>( &surfaceCapabilities ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2EXT" );
return surfaceCapabilities;
}
//=== VK_EXT_display_control ===
VULKAN_HPP_INLINE void Device::displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display,
const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT & displayPowerInfo ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkDisplayPowerControlEXT && "Function <vkDisplayPowerControlEXT> requires <VK_EXT_display_control>" );
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkDisplayPowerControlEXT(
static_cast<VkDevice>( m_device ), static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayPowerInfoEXT *>( &displayPowerInfo ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::displayPowerControlEXT" );
}
VULKAN_HPP_NODISCARD
VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Fence>::Type
Device::registerEventEXT( VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT const & deviceEventInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::Fence fence;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkRegisterDeviceEventEXT(
static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkDeviceEventInfoEXT *>( &deviceEventInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkFence *>( &fence ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
return VULKAN_HPP_UNEXPECTED( result );
# else
VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::registerEventEXT" );
# endif
}
return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Fence( *this, *reinterpret_cast<VkFence *>( &fence ), allocator );
}
VULKAN_HPP_NODISCARD
VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Fence>::Type
Device::registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayKHR const & display,
VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT const & displayEventInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::Fence fence;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkRegisterDisplayEventEXT(
static_cast<VkDevice>( m_device ),
static_cast<VkDisplayKHR>( *display ),
reinterpret_cast<const VkDisplayEventInfoEXT *>( &displayEventInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkFence *>( &fence ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
return VULKAN_HPP_UNEXPECTED( result );
# else
VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::registerDisplayEventEXT" );
# endif
}
return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Fence( *this, *reinterpret_cast<VkFence *>( &fence ), allocator );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t SwapchainKHR::getCounterEXT( VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetSwapchainCounterEXT && "Function <vkGetSwapchainCounterEXT> requires <VK_EXT_display_control>" );
uint64_t counterValue;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetSwapchainCounterEXT(
static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( m_swapchain ), static_cast<VkSurfaceCounterFlagBitsEXT>( counter ), &counterValue ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::getCounterEXT" );
return counterValue;
}
//=== VK_GOOGLE_display_timing ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE SwapchainKHR::getRefreshCycleDurationGOOGLE() const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetRefreshCycleDurationGOOGLE && "Function <vkGetRefreshCycleDurationGOOGLE> requires <VK_GOOGLE_display_timing>" );
VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE displayTimingProperties;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkGetRefreshCycleDurationGOOGLE( static_cast<VkDevice>( m_device ),
static_cast<VkSwapchainKHR>( m_swapchain ),
reinterpret_cast<VkRefreshCycleDurationGOOGLE *>( &displayTimingProperties ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::getRefreshCycleDurationGOOGLE" );
return displayTimingProperties;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE> SwapchainKHR::getPastPresentationTimingGOOGLE() const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetPastPresentationTimingGOOGLE &&
"Function <vkGetPastPresentationTimingGOOGLE> requires <VK_GOOGLE_display_timing>" );
std::vector<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE> presentationTimings;
uint32_t presentationTimingCount;
VULKAN_HPP_NAMESPACE::Result result;
do
{
result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetPastPresentationTimingGOOGLE(
static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( m_swapchain ), &presentationTimingCount, nullptr ) );
if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && presentationTimingCount )
{
presentationTimings.resize( presentationTimingCount );
result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkGetPastPresentationTimingGOOGLE( static_cast<VkDevice>( m_device ),
static_cast<VkSwapchainKHR>( m_swapchain ),
&presentationTimingCount,
reinterpret_cast<VkPastPresentationTimingGOOGLE *>( presentationTimings.data() ) ) );
}
} while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::getPastPresentationTimingGOOGLE" );
VULKAN_HPP_ASSERT( presentationTimingCount <= presentationTimings.size() );
if ( presentationTimingCount < presentationTimings.size() )
{
presentationTimings.resize( presentationTimingCount );
}
return presentationTimings;
}
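    // Illustrative usage sketch (comment only): for a vk::raii::SwapchainKHR named swapchain on a
    // device with VK_GOOGLE_display_timing enabled, the refresh period and past presentation
    // statistics can be sampled as
    //   vk::RefreshCycleDurationGOOGLE refresh = swapchain.getRefreshCycleDurationGOOGLE();
    //   std::vector<vk::PastPresentationTimingGOOGLE> timings = swapchain.getPastPresentationTimingGOOGLE();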
//=== VK_EXT_discard_rectangles ===
VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEXT(
uint32_t firstDiscardRectangle, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & discardRectangles ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDiscardRectangleEXT && "Function <vkCmdSetDiscardRectangleEXT> requires <VK_EXT_discard_rectangles>" );
getDispatcher()->vkCmdSetDiscardRectangleEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
firstDiscardRectangle,
discardRectangles.size(),
reinterpret_cast<const VkRect2D *>( discardRectangles.data() ) );
}
VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 discardRectangleEnable ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDiscardRectangleEnableEXT &&
"Function <vkCmdSetDiscardRectangleEnableEXT> requires <VK_EXT_discard_rectangles>" );
getDispatcher()->vkCmdSetDiscardRectangleEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( discardRectangleEnable ) );
}
VULKAN_HPP_INLINE void
CommandBuffer::setDiscardRectangleModeEXT( VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDiscardRectangleModeEXT &&
"Function <vkCmdSetDiscardRectangleModeEXT> requires <VK_EXT_discard_rectangles>" );
getDispatcher()->vkCmdSetDiscardRectangleModeEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkDiscardRectangleModeEXT>( discardRectangleMode ) );
}
//=== VK_EXT_hdr_metadata ===
VULKAN_HPP_INLINE void Device::setHdrMetadataEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainKHR> const & swapchains,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::HdrMetadataEXT> const & metadata ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkSetHdrMetadataEXT && "Function <vkSetHdrMetadataEXT> requires <VK_EXT_hdr_metadata>" );
# ifdef VULKAN_HPP_NO_EXCEPTIONS
VULKAN_HPP_ASSERT( swapchains.size() == metadata.size() );
# else
if ( swapchains.size() != metadata.size() )
{
throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Device::setHdrMetadataEXT: swapchains.size() != metadata.size()" );
}
# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
getDispatcher()->vkSetHdrMetadataEXT( static_cast<VkDevice>( m_device ),
swapchains.size(),
reinterpret_cast<const VkSwapchainKHR *>( swapchains.data() ),
reinterpret_cast<const VkHdrMetadataEXT *>( metadata.data() ) );
}
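    // Illustrative usage sketch (comment only): the two ArrayProxy parameters above must have the same
    // length; a single-swapchain call with a vk::raii::Device named device, a vk::raii::SwapchainKHR
    // named swapchain and a filled-in vk::HdrMetadataEXT named metadata would be
    //   device.setHdrMetadataEXT( *swapchain, metadata );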
//=== VK_KHR_create_renderpass2 ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::RenderPass>::Type
Device::createRenderPass2KHR( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::RenderPass renderPass;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateRenderPass2KHR(
static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkRenderPassCreateInfo2 *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkRenderPass *>( &renderPass ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
return VULKAN_HPP_UNEXPECTED( result );
# else
VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createRenderPass2KHR" );
# endif
}
return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::RenderPass( *this, *reinterpret_cast<VkRenderPass *>( &renderPass ), allocator );
}
VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin,
const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBeginRenderPass2KHR &&
"Function <vkCmdBeginRenderPass2KHR> requires <VK_KHR_create_renderpass2> or <VK_VERSION_1_2>" );
getDispatcher()->vkCmdBeginRenderPass2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
reinterpret_cast<const VkRenderPassBeginInfo *>( &renderPassBegin ),
reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ) );
}
VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo,
const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdNextSubpass2KHR && "Function <vkCmdNextSubpass2KHR> requires <VK_KHR_create_renderpass2> or <VK_VERSION_1_2>" );
getDispatcher()->vkCmdNextSubpass2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ),
reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) );
}
VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2KHR( const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdEndRenderPass2KHR &&
"Function <vkCmdEndRenderPass2KHR> requires <VK_KHR_create_renderpass2> or <VK_VERSION_1_2>" );
getDispatcher()->vkCmdEndRenderPass2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) );
}
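    // Illustrative usage sketch (comment only): with a recording vk::raii::CommandBuffer named
    // commandBuffer and a prepared vk::RenderPassBeginInfo named renderPassBegin, the renderpass2
    // entry points above are used in the same begin / next / end order as the core versions:
    //   commandBuffer.beginRenderPass2KHR( renderPassBegin, vk::SubpassBeginInfo( vk::SubpassContents::eInline ) );
    //   commandBuffer.nextSubpass2KHR( vk::SubpassBeginInfo( vk::SubpassContents::eInline ), vk::SubpassEndInfo() );
    //   commandBuffer.endRenderPass2KHR( vk::SubpassEndInfo() );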
//=== VK_KHR_shared_presentable_image ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result SwapchainKHR::getStatus() const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetSwapchainStatusKHR && "Function <vkGetSwapchainStatusKHR> requires <VK_KHR_shared_presentable_image>" );
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkGetSwapchainStatusKHR( static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( m_swapchain ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result,
VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::getStatus",
{ VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } );
return result;
}
//=== VK_KHR_external_fence_capabilities ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalFenceProperties
PhysicalDevice::getExternalFencePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo & externalFenceInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceExternalFencePropertiesKHR &&
"Function <vkGetPhysicalDeviceExternalFencePropertiesKHR> requires <VK_KHR_external_fence_capabilities> or <VK_VERSION_1_1>" );
VULKAN_HPP_NAMESPACE::ExternalFenceProperties externalFenceProperties;
getDispatcher()->vkGetPhysicalDeviceExternalFencePropertiesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( &externalFenceInfo ),
reinterpret_cast<VkExternalFenceProperties *>( &externalFenceProperties ) );
return externalFenceProperties;
}
# if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_KHR_external_fence_win32 ===
VULKAN_HPP_INLINE void Device::importFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkImportFenceWin32HandleKHR && "Function <vkImportFenceWin32HandleKHR> requires <VK_KHR_external_fence_win32>" );
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkImportFenceWin32HandleKHR(
static_cast<VkDevice>( m_device ), reinterpret_cast<const VkImportFenceWin32HandleInfoKHR *>( &importFenceWin32HandleInfo ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importFenceWin32HandleKHR" );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE HANDLE
Device::getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR & getWin32HandleInfo ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetFenceWin32HandleKHR && "Function <vkGetFenceWin32HandleKHR> requires <VK_KHR_external_fence_win32>" );
HANDLE handle;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetFenceWin32HandleKHR(
static_cast<VkDevice>( m_device ), reinterpret_cast<const VkFenceGetWin32HandleInfoKHR *>( &getWin32HandleInfo ), &handle ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceWin32HandleKHR" );
return handle;
}
# endif /*VK_USE_PLATFORM_WIN32_KHR*/
//=== VK_KHR_external_fence_fd ===
VULKAN_HPP_INLINE void Device::importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR & importFenceFdInfo ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkImportFenceFdKHR && "Function <vkImportFenceFdKHR> requires <VK_KHR_external_fence_fd>" );
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkImportFenceFdKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkImportFenceFdInfoKHR *>( &importFenceFdInfo ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importFenceFdKHR" );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE int Device::getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR & getFdInfo ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetFenceFdKHR && "Function <vkGetFenceFdKHR> requires <VK_KHR_external_fence_fd>" );
int fd;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkGetFenceFdKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkFenceGetFdInfoKHR *>( &getFdInfo ), &fd ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceFdKHR" );
return fd;
}
//=== VK_KHR_performance_query ===
VULKAN_HPP_NODISCARD
VULKAN_HPP_INLINE std::pair<std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR>, std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR>>
PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR &&
"Function <vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR> requires <VK_KHR_performance_query>" );
std::pair<std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR>, std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR>> data_;
std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR> & counters = data_.first;
std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR> & counterDescriptions = data_.second;
uint32_t counterCount;
VULKAN_HPP_NAMESPACE::Result result;
do
{
result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
static_cast<VkPhysicalDevice>( m_physicalDevice ), queueFamilyIndex, &counterCount, nullptr, nullptr ) );
if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && counterCount )
{
counters.resize( counterCount );
counterDescriptions.resize( counterCount );
result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
static_cast<VkPhysicalDevice>( m_physicalDevice ),
queueFamilyIndex,
&counterCount,
reinterpret_cast<VkPerformanceCounterKHR *>( counters.data() ),
reinterpret_cast<VkPerformanceCounterDescriptionKHR *>( counterDescriptions.data() ) ) );
}
} while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" );
VULKAN_HPP_ASSERT( counterCount <= counters.size() );
if ( counterCount < counters.size() )
{
counters.resize( counterCount );
counterDescriptions.resize( counterCount );
}
return data_;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint32_t PhysicalDevice::getQueueFamilyPerformanceQueryPassesKHR(
const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR & performanceQueryCreateInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR &&
"Function <vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR> requires <VK_KHR_performance_query>" );
uint32_t numPasses;
getDispatcher()->vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(
static_cast<VkPhysicalDevice>( m_physicalDevice ),
reinterpret_cast<const VkQueryPoolPerformanceCreateInfoKHR *>( &performanceQueryCreateInfo ),
&numPasses );
return numPasses;
}
VULKAN_HPP_INLINE void Device::acquireProfilingLockKHR( const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR & info ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkAcquireProfilingLockKHR && "Function <vkAcquireProfilingLockKHR> requires <VK_KHR_performance_query>" );
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkAcquireProfilingLockKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkAcquireProfilingLockInfoKHR *>( &info ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::acquireProfilingLockKHR" );
}
VULKAN_HPP_INLINE void Device::releaseProfilingLockKHR() const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkReleaseProfilingLockKHR && "Function <vkReleaseProfilingLockKHR> requires <VK_KHR_performance_query>" );
getDispatcher()->vkReleaseProfilingLockKHR( static_cast<VkDevice>( m_device ) );
}
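    // Illustrative usage sketch (comment only): the performance-query entry points above are typically
    // combined as follows, assuming physicalDevice, device, queueFamilyIndex and a
    // vk::QueryPoolPerformanceCreateInfoKHR named performanceQueryCreateInfo already exist:
    //   auto [counters, descriptions] = physicalDevice.enumerateQueueFamilyPerformanceQueryCountersKHR( queueFamilyIndex );
    //   uint32_t passes = physicalDevice.getQueueFamilyPerformanceQueryPassesKHR( performanceQueryCreateInfo );
    //   device.acquireProfilingLockKHR( vk::AcquireProfilingLockInfoKHR().setTimeout( UINT64_MAX ) );
    //   // ... record and submit the required number of passes ...
    //   device.releaseProfilingLockKHR();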
//=== VK_KHR_get_surface_capabilities2 ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR
PhysicalDevice::getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSurfaceCapabilities2KHR &&
"Function <vkGetPhysicalDeviceSurfaceCapabilities2KHR> requires <VK_KHR_get_surface_capabilities2>" );
VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR surfaceCapabilities;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkGetPhysicalDeviceSurfaceCapabilities2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
reinterpret_cast<VkSurfaceCapabilities2KHR *>( &surfaceCapabilities ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2KHR" );
return surfaceCapabilities;
}
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
PhysicalDevice::getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSurfaceCapabilities2KHR &&
"Function <vkGetPhysicalDeviceSurfaceCapabilities2KHR> requires <VK_KHR_get_surface_capabilities2>" );
StructureChain<X, Y, Z...> structureChain;
VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR & surfaceCapabilities = structureChain.template get<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR>();
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkGetPhysicalDeviceSurfaceCapabilities2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
reinterpret_cast<VkSurfaceCapabilities2KHR *>( &surfaceCapabilities ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2KHR" );
return structureChain;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>
PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSurfaceFormats2KHR &&
"Function <vkGetPhysicalDeviceSurfaceFormats2KHR> requires <VK_KHR_get_surface_capabilities2>" );
std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR> surfaceFormats;
uint32_t surfaceFormatCount;
VULKAN_HPP_NAMESPACE::Result result;
do
{
result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkGetPhysicalDeviceSurfaceFormats2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
&surfaceFormatCount,
nullptr ) );
if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && surfaceFormatCount )
{
surfaceFormats.resize( surfaceFormatCount );
result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkGetPhysicalDeviceSurfaceFormats2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
&surfaceFormatCount,
reinterpret_cast<VkSurfaceFormat2KHR *>( surfaceFormats.data() ) ) );
}
} while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" );
VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
if ( surfaceFormatCount < surfaceFormats.size() )
{
surfaceFormats.resize( surfaceFormatCount );
}
return surfaceFormats;
}
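    // Illustrative usage sketch (comment only): with a vk::raii::PhysicalDevice named physicalDevice
    // and a vk::raii::SurfaceKHR named surface, the "2" variants above are driven by a single
    // vk::PhysicalDeviceSurfaceInfo2KHR:
    //   vk::PhysicalDeviceSurfaceInfo2KHR surfaceInfo( *surface );
    //   vk::SurfaceCapabilities2KHR caps = physicalDevice.getSurfaceCapabilities2KHR( surfaceInfo );
    //   std::vector<vk::SurfaceFormat2KHR> formats = physicalDevice.getSurfaceFormats2KHR( surfaceInfo );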
template <typename StructureChain>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<StructureChain>
PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSurfaceFormats2KHR &&
"Function <vkGetPhysicalDeviceSurfaceFormats2KHR> requires <VK_KHR_get_surface_capabilities2>" );
std::vector<StructureChain> structureChains;
std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR> surfaceFormats;
uint32_t surfaceFormatCount;
VULKAN_HPP_NAMESPACE::Result result;
do
{
result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkGetPhysicalDeviceSurfaceFormats2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
&surfaceFormatCount,
nullptr ) );
if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && surfaceFormatCount )
{
structureChains.resize( surfaceFormatCount );
surfaceFormats.resize( surfaceFormatCount );
for ( uint32_t i = 0; i < surfaceFormatCount; i++ )
{
surfaceFormats[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>().pNext;
}
result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkGetPhysicalDeviceSurfaceFormats2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
&surfaceFormatCount,
reinterpret_cast<VkSurfaceFormat2KHR *>( surfaceFormats.data() ) ) );
}
} while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" );
VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
if ( surfaceFormatCount < surfaceFormats.size() )
{
structureChains.resize( surfaceFormatCount );
}
for ( uint32_t i = 0; i < surfaceFormatCount; i++ )
{
structureChains[i].template get<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>() = surfaceFormats[i];
}
return structureChains;
}
//=== VK_KHR_get_display_properties2 ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR> PhysicalDevice::getDisplayProperties2KHR() const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceDisplayProperties2KHR &&
"Function <vkGetPhysicalDeviceDisplayProperties2KHR> requires <VK_KHR_get_display_properties2>" );
std::vector<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR> properties;
uint32_t propertyCount;
VULKAN_HPP_NAMESPACE::Result result;
do
{
result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkGetPhysicalDeviceDisplayProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), &propertyCount, nullptr ) );
if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount )
{
properties.resize( propertyCount );
result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetPhysicalDeviceDisplayProperties2KHR(
static_cast<VkPhysicalDevice>( m_physicalDevice ), &propertyCount, reinterpret_cast<VkDisplayProperties2KHR *>( properties.data() ) ) );
}
} while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayProperties2KHR" );
VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
if ( propertyCount < properties.size() )
{
properties.resize( propertyCount );
}
return properties;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR> PhysicalDevice::getDisplayPlaneProperties2KHR() const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceDisplayPlaneProperties2KHR &&
"Function <vkGetPhysicalDeviceDisplayPlaneProperties2KHR> requires <VK_KHR_get_display_properties2>" );
std::vector<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR> properties;
uint32_t propertyCount;
VULKAN_HPP_NAMESPACE::Result result;
do
{
result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkGetPhysicalDeviceDisplayPlaneProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), &propertyCount, nullptr ) );
if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount )
{
properties.resize( propertyCount );
result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetPhysicalDeviceDisplayPlaneProperties2KHR(
static_cast<VkPhysicalDevice>( m_physicalDevice ), &propertyCount, reinterpret_cast<VkDisplayPlaneProperties2KHR *>( properties.data() ) ) );
}
} while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneProperties2KHR" );
VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
if ( propertyCount < properties.size() )
{
properties.resize( propertyCount );
}
return properties;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR> DisplayKHR::getModeProperties2() const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetDisplayModeProperties2KHR &&
"Function <vkGetDisplayModeProperties2KHR> requires <VK_KHR_get_display_properties2>" );
std::vector<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR> properties;
uint32_t propertyCount;
VULKAN_HPP_NAMESPACE::Result result;
do
{
result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetDisplayModeProperties2KHR(
static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkDisplayKHR>( m_display ), &propertyCount, nullptr ) );
if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount )
{
properties.resize( propertyCount );
result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkGetDisplayModeProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
static_cast<VkDisplayKHR>( m_display ),
&propertyCount,
reinterpret_cast<VkDisplayModeProperties2KHR *>( properties.data() ) ) );
}
} while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::DisplayKHR::getModeProperties2" );
VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
if ( propertyCount < properties.size() )
{
properties.resize( propertyCount );
}
return properties;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR
PhysicalDevice::getDisplayPlaneCapabilities2KHR( const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR & displayPlaneInfo ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetDisplayPlaneCapabilities2KHR &&
"Function <vkGetDisplayPlaneCapabilities2KHR> requires <VK_KHR_get_display_properties2>" );
VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR capabilities;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkGetDisplayPlaneCapabilities2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
reinterpret_cast<const VkDisplayPlaneInfo2KHR *>( &displayPlaneInfo ),
reinterpret_cast<VkDisplayPlaneCapabilities2KHR *>( &capabilities ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneCapabilities2KHR" );
return capabilities;
}
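    // Illustrative usage sketch (comment only): the VK_KHR_get_display_properties2 queries above are
    // usually chained together, e.g. with a vk::raii::PhysicalDevice named physicalDevice:
    //   auto displayProps = physicalDevice.getDisplayProperties2KHR();
    //   auto planeProps   = physicalDevice.getDisplayPlaneProperties2KHR();
    // and, for a vk::raii::DisplayKHR named display obtained from those properties,
    //   auto modeProps = display.getModeProperties2();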
# if defined( VK_USE_PLATFORM_IOS_MVK )
//=== VK_MVK_ios_surface ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR>::Type
Instance::createIOSSurfaceMVK( VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateIOSSurfaceMVK(
static_cast<VkInstance>( m_instance ),
reinterpret_cast<const VkIOSSurfaceCreateInfoMVK *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
return VULKAN_HPP_UNEXPECTED( result );
# else
VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Instance::createIOSSurfaceMVK" );
# endif
}
return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, *reinterpret_cast<VkSurfaceKHR *>( &surface ), allocator );
}
# endif /*VK_USE_PLATFORM_IOS_MVK*/
# if defined( VK_USE_PLATFORM_MACOS_MVK )
//=== VK_MVK_macos_surface ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR>::Type
Instance::createMacOSSurfaceMVK( VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateMacOSSurfaceMVK(
static_cast<VkInstance>( m_instance ),
reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
return VULKAN_HPP_UNEXPECTED( result );
# else
VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Instance::createMacOSSurfaceMVK" );
# endif
}
return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, *reinterpret_cast<VkSurfaceKHR *>( &surface ), allocator );
}
# endif /*VK_USE_PLATFORM_MACOS_MVK*/
//=== VK_EXT_debug_utils ===
VULKAN_HPP_INLINE void Device::setDebugUtilsObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT & nameInfo ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkSetDebugUtilsObjectNameEXT && "Function <vkSetDebugUtilsObjectNameEXT> requires <VK_EXT_debug_utils>" );
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkSetDebugUtilsObjectNameEXT(
static_cast<VkDevice>( m_device ), reinterpret_cast<const VkDebugUtilsObjectNameInfoEXT *>( &nameInfo ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setDebugUtilsObjectNameEXT" );
}
VULKAN_HPP_INLINE void Device::setDebugUtilsObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT & tagInfo ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkSetDebugUtilsObjectTagEXT && "Function <vkSetDebugUtilsObjectTagEXT> requires <VK_EXT_debug_utils>" );
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkSetDebugUtilsObjectTagEXT( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkDebugUtilsObjectTagInfoEXT *>( &tagInfo ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setDebugUtilsObjectTagEXT" );
}
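    // Usage sketch (illustrative comment, not emitted by the generator): naming a Vulkan object for
    // debugging tools; `device` and `image` are assumed RAII handles owned by the caller, and the 64-bit
    // handle conversion is the usual idiom for non-dispatchable handles on 64-bit platforms.
    //
    //   VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT nameInfo;
    //   nameInfo.objectType   = VULKAN_HPP_NAMESPACE::ObjectType::eImage;
    //   nameInfo.objectHandle = uint64_t( static_cast<VkImage>( *image ) );
    //   nameInfo.pObjectName  = "shadow map";
    //   device.setDebugUtilsObjectNameEXT( nameInfo );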
VULKAN_HPP_INLINE void Queue::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkQueueBeginDebugUtilsLabelEXT && "Function <vkQueueBeginDebugUtilsLabelEXT> requires <VK_EXT_debug_utils>" );
getDispatcher()->vkQueueBeginDebugUtilsLabelEXT( static_cast<VkQueue>( m_queue ), reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
}
VULKAN_HPP_INLINE void Queue::endDebugUtilsLabelEXT() const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkQueueEndDebugUtilsLabelEXT && "Function <vkQueueEndDebugUtilsLabelEXT> requires <VK_EXT_debug_utils>" );
getDispatcher()->vkQueueEndDebugUtilsLabelEXT( static_cast<VkQueue>( m_queue ) );
}
VULKAN_HPP_INLINE void Queue::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkQueueInsertDebugUtilsLabelEXT && "Function <vkQueueInsertDebugUtilsLabelEXT> requires <VK_EXT_debug_utils>" );
getDispatcher()->vkQueueInsertDebugUtilsLabelEXT( static_cast<VkQueue>( m_queue ), reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
}
VULKAN_HPP_INLINE void CommandBuffer::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBeginDebugUtilsLabelEXT && "Function <vkCmdBeginDebugUtilsLabelEXT> requires <VK_EXT_debug_utils>" );
getDispatcher()->vkCmdBeginDebugUtilsLabelEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
}
VULKAN_HPP_INLINE void CommandBuffer::endDebugUtilsLabelEXT() const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdEndDebugUtilsLabelEXT && "Function <vkCmdEndDebugUtilsLabelEXT> requires <VK_EXT_debug_utils>" );
getDispatcher()->vkCmdEndDebugUtilsLabelEXT( static_cast<VkCommandBuffer>( m_commandBuffer ) );
}
VULKAN_HPP_INLINE void CommandBuffer::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdInsertDebugUtilsLabelEXT && "Function <vkCmdInsertDebugUtilsLabelEXT> requires <VK_EXT_debug_utils>" );
getDispatcher()->vkCmdInsertDebugUtilsLabelEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
}
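    // Usage sketch (illustrative comment, not emitted by the generator): bracketing recorded work with a
    // debug label; `commandBuffer` is assumed to be a valid RAII CommandBuffer.
    //
    //   VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT label;
    //   label.pLabelName = "shadow pass";
    //   commandBuffer.beginDebugUtilsLabelEXT( label );
    //   // ... record the pass ...
    //   commandBuffer.endDebugUtilsLabelEXT();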
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DebugUtilsMessengerEXT>::Type
Instance::createDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateDebugUtilsMessengerEXT(
static_cast<VkInstance>( m_instance ),
reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkDebugUtilsMessengerEXT *>( &messenger ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
return VULKAN_HPP_UNEXPECTED( result );
# else
VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Instance::createDebugUtilsMessengerEXT" );
# endif
}
return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DebugUtilsMessengerEXT(
*this, *reinterpret_cast<VkDebugUtilsMessengerEXT *>( &messenger ), allocator );
}
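    // Usage sketch (illustrative comment, not emitted by the generator): creating a RAII debug messenger;
    // `instance` is an existing RAII Instance and `debugCallback` a user-supplied
    // PFN_vkDebugUtilsMessengerCallbackEXT.
    //
    //   VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT createInfo;
    //   createInfo.messageSeverity = VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT::eWarning |
    //                                VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT::eError;
    //   createInfo.messageType     = VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagBitsEXT::eValidation;
    //   createInfo.pfnUserCallback = debugCallback;
    //   VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DebugUtilsMessengerEXT messenger =
    //     instance.createDebugUtilsMessengerEXT( createInfo );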
VULKAN_HPP_INLINE void
Instance::submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity,
VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes,
const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT & callbackData ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkSubmitDebugUtilsMessageEXT && "Function <vkSubmitDebugUtilsMessageEXT> requires <VK_EXT_debug_utils>" );
getDispatcher()->vkSubmitDebugUtilsMessageEXT( static_cast<VkInstance>( m_instance ),
static_cast<VkDebugUtilsMessageSeverityFlagBitsEXT>( messageSeverity ),
static_cast<VkDebugUtilsMessageTypeFlagsEXT>( messageTypes ),
reinterpret_cast<const VkDebugUtilsMessengerCallbackDataEXT *>( &callbackData ) );
}
# if defined( VK_USE_PLATFORM_ANDROID_KHR )
//=== VK_ANDROID_external_memory_android_hardware_buffer ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID
Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetAndroidHardwareBufferPropertiesANDROID &&
"Function <vkGetAndroidHardwareBufferPropertiesANDROID> requires <VK_ANDROID_external_memory_android_hardware_buffer>" );
VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID properties;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetAndroidHardwareBufferPropertiesANDROID(
static_cast<VkDevice>( m_device ), &buffer, reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID *>( &properties ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getAndroidHardwareBufferPropertiesANDROID" );
return properties;
}
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetAndroidHardwareBufferPropertiesANDROID &&
"Function <vkGetAndroidHardwareBufferPropertiesANDROID> requires <VK_ANDROID_external_memory_android_hardware_buffer>" );
StructureChain<X, Y, Z...> structureChain;
VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID & properties =
structureChain.template get<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID>();
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetAndroidHardwareBufferPropertiesANDROID(
static_cast<VkDevice>( m_device ), &buffer, reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID *>( &properties ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getAndroidHardwareBufferPropertiesANDROID" );
return structureChain;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE struct AHardwareBuffer *
Device::getMemoryAndroidHardwareBufferANDROID( const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID & info ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetMemoryAndroidHardwareBufferANDROID &&
"Function <vkGetMemoryAndroidHardwareBufferANDROID> requires <VK_ANDROID_external_memory_android_hardware_buffer>" );
struct AHardwareBuffer * buffer;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetMemoryAndroidHardwareBufferANDROID(
static_cast<VkDevice>( m_device ), reinterpret_cast<const VkMemoryGetAndroidHardwareBufferInfoANDROID *>( &info ), &buffer ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryAndroidHardwareBufferANDROID" );
return buffer;
}
# endif /*VK_USE_PLATFORM_ANDROID_KHR*/
# if defined( VK_ENABLE_BETA_EXTENSIONS )
//=== VK_AMDX_shader_enqueue ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<std::vector<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Pipeline>>::Type
Device::createExecutionGraphPipelinesAMDX(
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX> const & createInfos,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
{
std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() );
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateExecutionGraphPipelinesAMDX(
static_cast<VkDevice>( m_device ),
pipelineCache ? static_cast<VkPipelineCache>( **pipelineCache ) : 0,
createInfos.size(),
reinterpret_cast<const VkExecutionGraphPipelineCreateInfoAMDX *>( createInfos.data() ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
if ( ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) && ( result != VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
{
# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
return VULKAN_HPP_UNEXPECTED( result );
# else
VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createExecutionGraphPipelinesAMDX" );
# endif
}
std::vector<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Pipeline> pipelinesRAII;
pipelinesRAII.reserve( pipelines.size() );
for ( auto & pipeline : pipelines )
{
pipelinesRAII.emplace_back( *this, *reinterpret_cast<VkPipeline *>( &pipeline ), allocator, result );
}
return pipelinesRAII;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Pipeline>::Type
Device::createExecutionGraphPipelineAMDX(
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::Pipeline pipeline;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateExecutionGraphPipelinesAMDX(
static_cast<VkDevice>( m_device ),
pipelineCache ? static_cast<VkPipelineCache>( **pipelineCache ) : 0,
1,
reinterpret_cast<const VkExecutionGraphPipelineCreateInfoAMDX *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkPipeline *>( &pipeline ) ) );
if ( ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) && ( result != VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
{
# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
return VULKAN_HPP_UNEXPECTED( result );
# else
VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createExecutionGraphPipelineAMDX" );
# endif
}
return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Pipeline( *this, *reinterpret_cast<VkPipeline *>( &pipeline ), allocator, result );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX Pipeline::getExecutionGraphScratchSizeAMDX() const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetExecutionGraphPipelineScratchSizeAMDX &&
"Function <vkGetExecutionGraphPipelineScratchSizeAMDX> requires <VK_AMDX_shader_enqueue>" );
VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX sizeInfo;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetExecutionGraphPipelineScratchSizeAMDX(
static_cast<VkDevice>( m_device ), static_cast<VkPipeline>( m_pipeline ), reinterpret_cast<VkExecutionGraphPipelineScratchSizeAMDX *>( &sizeInfo ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getExecutionGraphScratchSizeAMDX" );
return sizeInfo;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint32_t
Pipeline::getExecutionGraphNodeIndexAMDX( const VULKAN_HPP_NAMESPACE::PipelineShaderStageNodeCreateInfoAMDX & nodeInfo ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetExecutionGraphPipelineNodeIndexAMDX &&
"Function <vkGetExecutionGraphPipelineNodeIndexAMDX> requires <VK_AMDX_shader_enqueue>" );
uint32_t nodeIndex;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkGetExecutionGraphPipelineNodeIndexAMDX( static_cast<VkDevice>( m_device ),
static_cast<VkPipeline>( m_pipeline ),
reinterpret_cast<const VkPipelineShaderStageNodeCreateInfoAMDX *>( &nodeInfo ),
&nodeIndex ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getExecutionGraphNodeIndexAMDX" );
return nodeIndex;
}
VULKAN_HPP_INLINE void CommandBuffer::initializeGraphScratchMemoryAMDX( VULKAN_HPP_NAMESPACE::Pipeline executionGraph,
VULKAN_HPP_NAMESPACE::DeviceAddress scratch,
VULKAN_HPP_NAMESPACE::DeviceSize scratchSize ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdInitializeGraphScratchMemoryAMDX &&
"Function <vkCmdInitializeGraphScratchMemoryAMDX> requires <VK_AMDX_shader_enqueue>" );
getDispatcher()->vkCmdInitializeGraphScratchMemoryAMDX( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkPipeline>( executionGraph ),
static_cast<VkDeviceAddress>( scratch ),
static_cast<VkDeviceSize>( scratchSize ) );
}
VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch,
VULKAN_HPP_NAMESPACE::DeviceSize scratchSize,
const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX & countInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDispatchGraphAMDX && "Function <vkCmdDispatchGraphAMDX> requires <VK_AMDX_shader_enqueue>" );
getDispatcher()->vkCmdDispatchGraphAMDX( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkDeviceAddress>( scratch ),
static_cast<VkDeviceSize>( scratchSize ),
reinterpret_cast<const VkDispatchGraphCountInfoAMDX *>( &countInfo ) );
}
VULKAN_HPP_INLINE void
CommandBuffer::dispatchGraphIndirectAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch,
VULKAN_HPP_NAMESPACE::DeviceSize scratchSize,
const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX & countInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDispatchGraphIndirectAMDX && "Function <vkCmdDispatchGraphIndirectAMDX> requires <VK_AMDX_shader_enqueue>" );
getDispatcher()->vkCmdDispatchGraphIndirectAMDX( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkDeviceAddress>( scratch ),
static_cast<VkDeviceSize>( scratchSize ),
reinterpret_cast<const VkDispatchGraphCountInfoAMDX *>( &countInfo ) );
}
VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphIndirectCountAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch,
VULKAN_HPP_NAMESPACE::DeviceSize scratchSize,
VULKAN_HPP_NAMESPACE::DeviceAddress countInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDispatchGraphIndirectCountAMDX &&
"Function <vkCmdDispatchGraphIndirectCountAMDX> requires <VK_AMDX_shader_enqueue>" );
getDispatcher()->vkCmdDispatchGraphIndirectCountAMDX( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkDeviceAddress>( scratch ),
static_cast<VkDeviceSize>( scratchSize ),
static_cast<VkDeviceAddress>( countInfo ) );
}
# endif /*VK_ENABLE_BETA_EXTENSIONS*/
//=== VK_EXT_sample_locations ===
VULKAN_HPP_INLINE void
CommandBuffer::setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT & sampleLocationsInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetSampleLocationsEXT && "Function <vkCmdSetSampleLocationsEXT> requires <VK_EXT_sample_locations>" );
getDispatcher()->vkCmdSetSampleLocationsEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
reinterpret_cast<const VkSampleLocationsInfoEXT *>( &sampleLocationsInfo ) );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT
PhysicalDevice::getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceMultisamplePropertiesEXT &&
"Function <vkGetPhysicalDeviceMultisamplePropertiesEXT> requires <VK_EXT_sample_locations>" );
VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT multisampleProperties;
getDispatcher()->vkGetPhysicalDeviceMultisamplePropertiesEXT( static_cast<VkPhysicalDevice>( m_physicalDevice ),
static_cast<VkSampleCountFlagBits>( samples ),
reinterpret_cast<VkMultisamplePropertiesEXT *>( &multisampleProperties ) );
return multisampleProperties;
}
//=== VK_KHR_get_memory_requirements2 ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
Device::getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageMemoryRequirements2KHR &&
"Function <vkGetImageMemoryRequirements2KHR> requires <VK_KHR_get_memory_requirements2> or <VK_VERSION_1_1>" );
VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
getDispatcher()->vkGetImageMemoryRequirements2KHR( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ),
reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
return memoryRequirements;
}
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
Device::getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageMemoryRequirements2KHR &&
"Function <vkGetImageMemoryRequirements2KHR> requires <VK_KHR_get_memory_requirements2> or <VK_VERSION_1_1>" );
VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
getDispatcher()->vkGetImageMemoryRequirements2KHR( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ),
reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
return structureChain;
}
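    // Usage sketch (illustrative comment, not emitted by the generator): the StructureChain overload above
    // lets the caller retrieve extension structures together with the base result; `device` and `image`
    // are assumed RAII handles.
    //
    //   VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 info( *image );
    //   auto chain = device.getImageMemoryRequirements2KHR<VULKAN_HPP_NAMESPACE::MemoryRequirements2,
    //                                                      VULKAN_HPP_NAMESPACE::MemoryDedicatedRequirements>( info );
    //   bool preferDedicated = chain.get<VULKAN_HPP_NAMESPACE::MemoryDedicatedRequirements>().prefersDedicatedAllocation;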
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
Device::getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetBufferMemoryRequirements2KHR &&
"Function <vkGetBufferMemoryRequirements2KHR> requires <VK_KHR_get_memory_requirements2> or <VK_VERSION_1_1>" );
VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
getDispatcher()->vkGetBufferMemoryRequirements2KHR( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ),
reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
return memoryRequirements;
}
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
Device::getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetBufferMemoryRequirements2KHR &&
"Function <vkGetBufferMemoryRequirements2KHR> requires <VK_KHR_get_memory_requirements2> or <VK_VERSION_1_1>" );
VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
getDispatcher()->vkGetBufferMemoryRequirements2KHR( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ),
reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
return structureChain;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2>
Device::getImageSparseMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageSparseMemoryRequirements2KHR &&
"Function <vkGetImageSparseMemoryRequirements2KHR> requires <VK_KHR_get_memory_requirements2> or <VK_VERSION_1_1>" );
std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2> sparseMemoryRequirements;
uint32_t sparseMemoryRequirementCount;
getDispatcher()->vkGetImageSparseMemoryRequirements2KHR(
static_cast<VkDevice>( m_device ), reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), &sparseMemoryRequirementCount, nullptr );
sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
getDispatcher()->vkGetImageSparseMemoryRequirements2KHR( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ),
&sparseMemoryRequirementCount,
reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
{
sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
}
return sparseMemoryRequirements;
}
//=== VK_KHR_acceleration_structure ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::AccelerationStructureKHR>::Type
Device::createAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateAccelerationStructureKHR(
static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkAccelerationStructureCreateInfoKHR *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkAccelerationStructureKHR *>( &accelerationStructure ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
return VULKAN_HPP_UNEXPECTED( result );
# else
VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createAccelerationStructureKHR" );
# endif
}
return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::AccelerationStructureKHR(
*this, *reinterpret_cast<VkAccelerationStructureKHR *>( &accelerationStructure ), allocator );
}
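    // Usage sketch (illustrative comment, not emitted by the generator): creating a RAII acceleration
    // structure inside a caller-provided buffer; `device`, `asBuffer` and `buildSizes` are assumptions.
    //
    //   VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR createInfo;
    //   createInfo.buffer = *asBuffer;
    //   createInfo.size   = buildSizes.accelerationStructureSize;
    //   createInfo.type   = VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR::eBottomLevel;
    //   VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::AccelerationStructureKHR blas =
    //     device.createAccelerationStructureKHR( createInfo );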
VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructuresKHR(
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const> const & pBuildRangeInfos ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBuildAccelerationStructuresKHR &&
"Function <vkCmdBuildAccelerationStructuresKHR> requires <VK_KHR_acceleration_structure>" );
# ifdef VULKAN_HPP_NO_EXCEPTIONS
VULKAN_HPP_ASSERT( infos.size() == pBuildRangeInfos.size() );
# else
if ( infos.size() != pBuildRangeInfos.size() )
{
throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::buildAccelerationStructuresKHR: infos.size() != pBuildRangeInfos.size()" );
}
# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
getDispatcher()->vkCmdBuildAccelerationStructuresKHR(
static_cast<VkCommandBuffer>( m_commandBuffer ),
infos.size(),
reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( infos.data() ),
reinterpret_cast<const VkAccelerationStructureBuildRangeInfoKHR * const *>( pBuildRangeInfos.data() ) );
}
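    // Usage sketch (illustrative comment, not emitted by the generator): the geometry infos and the
    // range-info pointers passed above must have matching counts; a single-structure build reduces to
    //
    //   VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR rangeInfo( primitiveCount );
    //   const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * pRangeInfo = &rangeInfo;
    //   commandBuffer.buildAccelerationStructuresKHR( buildGeometryInfo, pRangeInfo );
    //
    // where `buildGeometryInfo` is an AccelerationStructureBuildGeometryInfoKHR prepared by the caller.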
VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructuresIndirectKHR(
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceAddress> const & indirectDeviceAddresses,
VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t> const & indirectStrides,
VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t * const> const & pMaxPrimitiveCounts ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBuildAccelerationStructuresIndirectKHR &&
"Function <vkCmdBuildAccelerationStructuresIndirectKHR> requires <VK_KHR_acceleration_structure>" );
# ifdef VULKAN_HPP_NO_EXCEPTIONS
VULKAN_HPP_ASSERT( infos.size() == indirectDeviceAddresses.size() );
VULKAN_HPP_ASSERT( infos.size() == indirectStrides.size() );
VULKAN_HPP_ASSERT( infos.size() == pMaxPrimitiveCounts.size() );
# else
if ( infos.size() != indirectDeviceAddresses.size() )
{
throw LogicError( VULKAN_HPP_NAMESPACE_STRING
"::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != indirectDeviceAddresses.size()" );
}
if ( infos.size() != indirectStrides.size() )
{
throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != indirectStrides.size()" );
}
if ( infos.size() != pMaxPrimitiveCounts.size() )
{
throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != pMaxPrimitiveCounts.size()" );
}
# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
getDispatcher()->vkCmdBuildAccelerationStructuresIndirectKHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
infos.size(),
reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( infos.data() ),
reinterpret_cast<const VkDeviceAddress *>( indirectDeviceAddresses.data() ),
indirectStrides.data(),
pMaxPrimitiveCounts.data() );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::buildAccelerationStructuresKHR(
VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const> const & pBuildRangeInfos ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkBuildAccelerationStructuresKHR &&
"Function <vkBuildAccelerationStructuresKHR> requires <VK_KHR_acceleration_structure>" );
# ifdef VULKAN_HPP_NO_EXCEPTIONS
VULKAN_HPP_ASSERT( infos.size() == pBuildRangeInfos.size() );
# else
if ( infos.size() != pBuildRangeInfos.size() )
{
throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Device::buildAccelerationStructuresKHR: infos.size() != pBuildRangeInfos.size()" );
}
# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkBuildAccelerationStructuresKHR(
static_cast<VkDevice>( m_device ),
static_cast<VkDeferredOperationKHR>( deferredOperation ),
infos.size(),
reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( infos.data() ),
reinterpret_cast<const VkAccelerationStructureBuildRangeInfoKHR * const *>( pBuildRangeInfos.data() ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result,
VULKAN_HPP_NAMESPACE_STRING "::Device::buildAccelerationStructuresKHR",
{ VULKAN_HPP_NAMESPACE::Result::eSuccess,
VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
}
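    // Usage sketch (illustrative comment, not emitted by the generator): a host-side build through the
    // wrapper above returns the raw Result so the caller can detect deferral; `deferredOp` is assumed to
    // be a RAII DeferredOperationKHR and the host-command feature is assumed to be enabled.
    //
    //   VULKAN_HPP_NAMESPACE::Result buildResult =
    //     device.buildAccelerationStructuresKHR( *deferredOp, buildGeometryInfo, pRangeInfo );
    //   if ( buildResult == VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR )
    //   {
    //     // drive the deferred operation to completion before using the acceleration structure
    //   }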
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result
Device::copyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR & info ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCopyAccelerationStructureKHR &&
"Function <vkCopyAccelerationStructureKHR> requires <VK_KHR_acceleration_structure>" );
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkCopyAccelerationStructureKHR( static_cast<VkDevice>( m_device ),
static_cast<VkDeferredOperationKHR>( deferredOperation ),
reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( &info ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result,
VULKAN_HPP_NAMESPACE_STRING "::Device::copyAccelerationStructureKHR",
{ VULKAN_HPP_NAMESPACE::Result::eSuccess,
VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result
Device::copyAccelerationStructureToMemoryKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR & info ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCopyAccelerationStructureToMemoryKHR &&
"Function <vkCopyAccelerationStructureToMemoryKHR> requires <VK_KHR_acceleration_structure>" );
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkCopyAccelerationStructureToMemoryKHR( static_cast<VkDevice>( m_device ),
static_cast<VkDeferredOperationKHR>( deferredOperation ),
reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( &info ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result,
VULKAN_HPP_NAMESPACE_STRING "::Device::copyAccelerationStructureToMemoryKHR",
{ VULKAN_HPP_NAMESPACE::Result::eSuccess,
VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result
Device::copyMemoryToAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR & info ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCopyMemoryToAccelerationStructureKHR &&
"Function <vkCopyMemoryToAccelerationStructureKHR> requires <VK_KHR_acceleration_structure>" );
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkCopyMemoryToAccelerationStructureKHR( static_cast<VkDevice>( m_device ),
static_cast<VkDeferredOperationKHR>( deferredOperation ),
reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( &info ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result,
VULKAN_HPP_NAMESPACE_STRING "::Device::copyMemoryToAccelerationStructureKHR",
{ VULKAN_HPP_NAMESPACE::Result::eSuccess,
VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
}
template <typename DataType>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<DataType> Device::writeAccelerationStructuresPropertiesKHR(
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures,
VULKAN_HPP_NAMESPACE::QueryType queryType,
size_t dataSize,
size_t stride ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkWriteAccelerationStructuresPropertiesKHR &&
"Function <vkWriteAccelerationStructuresPropertiesKHR> requires <VK_KHR_acceleration_structure>" );
VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 );
std::vector<DataType> data( dataSize / sizeof( DataType ) );
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkWriteAccelerationStructuresPropertiesKHR( static_cast<VkDevice>( m_device ),
accelerationStructures.size(),
reinterpret_cast<const VkAccelerationStructureKHR *>( accelerationStructures.data() ),
static_cast<VkQueryType>( queryType ),
data.size() * sizeof( DataType ),
reinterpret_cast<void *>( data.data() ),
stride ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::writeAccelerationStructuresPropertiesKHR" );
return data;
}
template <typename DataType>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE DataType Device::writeAccelerationStructuresPropertyKHR(
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures,
VULKAN_HPP_NAMESPACE::QueryType queryType,
size_t stride ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkWriteAccelerationStructuresPropertiesKHR &&
"Function <vkWriteAccelerationStructuresPropertiesKHR> requires <VK_KHR_acceleration_structure>" );
DataType data;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkWriteAccelerationStructuresPropertiesKHR( static_cast<VkDevice>( m_device ),
accelerationStructures.size(),
reinterpret_cast<const VkAccelerationStructureKHR *>( accelerationStructures.data() ),
static_cast<VkQueryType>( queryType ),
sizeof( DataType ),
reinterpret_cast<void *>( &data ),
stride ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::writeAccelerationStructuresPropertyKHR" );
return data;
}
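    // Usage sketch (illustrative comment, not emitted by the generator): reading back the compacted size
    // of a single acceleration structure on the host; assumes the structure was built with the
    // allow-compaction flag and that host queries of this type are supported.
    //
    //   VULKAN_HPP_NAMESPACE::DeviceSize compactedSize =
    //     device.writeAccelerationStructuresPropertyKHR<VULKAN_HPP_NAMESPACE::DeviceSize>(
    //       *blas, VULKAN_HPP_NAMESPACE::QueryType::eAccelerationStructureCompactedSizeKHR, sizeof( VULKAN_HPP_NAMESPACE::DeviceSize ) );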
VULKAN_HPP_INLINE void
CommandBuffer::copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR & info ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyAccelerationStructureKHR &&
"Function <vkCmdCopyAccelerationStructureKHR> requires <VK_KHR_acceleration_structure>" );
getDispatcher()->vkCmdCopyAccelerationStructureKHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( &info ) );
}
VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureToMemoryKHR(
const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR & info ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyAccelerationStructureToMemoryKHR &&
"Function <vkCmdCopyAccelerationStructureToMemoryKHR> requires <VK_KHR_acceleration_structure>" );
getDispatcher()->vkCmdCopyAccelerationStructureToMemoryKHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( &info ) );
}
VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToAccelerationStructureKHR(
const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR & info ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyMemoryToAccelerationStructureKHR &&
"Function <vkCmdCopyMemoryToAccelerationStructureKHR> requires <VK_KHR_acceleration_structure>" );
getDispatcher()->vkCmdCopyMemoryToAccelerationStructureKHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( &info ) );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress
Device::getAccelerationStructureAddressKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR & info ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetAccelerationStructureDeviceAddressKHR &&
"Function <vkGetAccelerationStructureDeviceAddressKHR> requires <VK_KHR_acceleration_structure>" );
VkDeviceAddress result = getDispatcher()->vkGetAccelerationStructureDeviceAddressKHR(
static_cast<VkDevice>( m_device ), reinterpret_cast<const VkAccelerationStructureDeviceAddressInfoKHR *>( &info ) );
return static_cast<VULKAN_HPP_NAMESPACE::DeviceAddress>( result );
}
VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesKHR(
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures,
VULKAN_HPP_NAMESPACE::QueryType queryType,
VULKAN_HPP_NAMESPACE::QueryPool queryPool,
uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdWriteAccelerationStructuresPropertiesKHR &&
"Function <vkCmdWriteAccelerationStructuresPropertiesKHR> requires <VK_KHR_acceleration_structure>" );
getDispatcher()->vkCmdWriteAccelerationStructuresPropertiesKHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
accelerationStructures.size(),
reinterpret_cast<const VkAccelerationStructureKHR *>( accelerationStructures.data() ),
static_cast<VkQueryType>( queryType ),
static_cast<VkQueryPool>( queryPool ),
firstQuery );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR Device::getAccelerationStructureCompatibilityKHR(
const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR & versionInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceAccelerationStructureCompatibilityKHR &&
"Function <vkGetDeviceAccelerationStructureCompatibilityKHR> requires <VK_KHR_acceleration_structure>" );
VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR compatibility;
getDispatcher()->vkGetDeviceAccelerationStructureCompatibilityKHR( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkAccelerationStructureVersionInfoKHR *>( &versionInfo ),
reinterpret_cast<VkAccelerationStructureCompatibilityKHR *>( &compatibility ) );
return compatibility;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR
Device::getAccelerationStructureBuildSizesKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType,
const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR & buildInfo,
VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t> const & maxPrimitiveCounts ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetAccelerationStructureBuildSizesKHR &&
"Function <vkGetAccelerationStructureBuildSizesKHR> requires <VK_KHR_acceleration_structure>" );
# ifdef VULKAN_HPP_NO_EXCEPTIONS
VULKAN_HPP_ASSERT( maxPrimitiveCounts.size() == buildInfo.geometryCount );
# else
if ( maxPrimitiveCounts.size() != buildInfo.geometryCount )
{
throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureBuildSizesKHR: maxPrimitiveCounts.size() != buildInfo.geometryCount" );
}
# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR sizeInfo;
getDispatcher()->vkGetAccelerationStructureBuildSizesKHR( static_cast<VkDevice>( m_device ),
static_cast<VkAccelerationStructureBuildTypeKHR>( buildType ),
reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( &buildInfo ),
maxPrimitiveCounts.data(),
reinterpret_cast<VkAccelerationStructureBuildSizesInfoKHR *>( &sizeInfo ) );
return sizeInfo;
}
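    // Usage sketch (illustrative comment, not emitted by the generator): querying the storage and scratch
    // sizes for a build whose geometry info describes a single geometry; `buildGeometryInfo` and
    // `primitiveCount` are assumptions of the caller.
    //
    //   VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR sizes =
    //     device.getAccelerationStructureBuildSizesKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR::eDevice,
    //                                                   buildGeometryInfo,
    //                                                   primitiveCount );
    //   // sizes.accelerationStructureSize and sizes.buildScratchSize drive the buffer allocations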
//=== VK_KHR_ray_tracing_pipeline ===
VULKAN_HPP_INLINE void CommandBuffer::traceRaysKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & raygenShaderBindingTable,
const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & missShaderBindingTable,
const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & hitShaderBindingTable,
const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & callableShaderBindingTable,
uint32_t width,
uint32_t height,
uint32_t depth ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdTraceRaysKHR && "Function <vkCmdTraceRaysKHR> requires <VK_KHR_ray_tracing_pipeline>" );
getDispatcher()->vkCmdTraceRaysKHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &raygenShaderBindingTable ),
reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &missShaderBindingTable ),
reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &hitShaderBindingTable ),
reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &callableShaderBindingTable ),
width,
height,
depth );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<std::vector<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Pipeline>>::Type
Device::createRayTracingPipelinesKHR(
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeferredOperationKHR> const & deferredOperation,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
{
std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() );
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateRayTracingPipelinesKHR(
static_cast<VkDevice>( m_device ),
deferredOperation ? static_cast<VkDeferredOperationKHR>( **deferredOperation ) : 0,
pipelineCache ? static_cast<VkPipelineCache>( **pipelineCache ) : 0,
createInfos.size(),
reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( createInfos.data() ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
if ( ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) && ( result != VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR ) &&
( result != VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR ) && ( result != VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
{
# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
return VULKAN_HPP_UNEXPECTED( result );
# else
VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createRayTracingPipelinesKHR" );
# endif
}
std::vector<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Pipeline> pipelinesRAII;
pipelinesRAII.reserve( pipelines.size() );
for ( auto & pipeline : pipelines )
{
pipelinesRAII.emplace_back( *this, *reinterpret_cast<VkPipeline *>( &pipeline ), allocator, result );
}
return pipelinesRAII;
}
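    // Usage sketch (illustrative comment, not emitted by the generator): building a batch of RAII ray
    // tracing pipelines without a deferred operation or pipeline cache; `createInfos` is a caller-built
    // std::vector<VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR>.
    //
    //   std::vector<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Pipeline> rtPipelines =
    //     device.createRayTracingPipelinesKHR( nullptr, nullptr, createInfos );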
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Pipeline>::Type
Device::createRayTracingPipelineKHR(
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeferredOperationKHR> const & deferredOperation,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::Pipeline pipeline;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateRayTracingPipelinesKHR(
static_cast<VkDevice>( m_device ),
deferredOperation ? static_cast<VkDeferredOperationKHR>( **deferredOperation ) : 0,
pipelineCache ? static_cast<VkPipelineCache>( **pipelineCache ) : 0,
1,
reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkPipeline *>( &pipeline ) ) );
if ( ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) && ( result != VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR ) &&
( result != VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR ) && ( result != VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
{
# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
return VULKAN_HPP_UNEXPECTED( result );
# else
VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createRayTracingPipelineKHR" );
# endif
}
return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Pipeline( *this, *reinterpret_cast<VkPipeline *>( &pipeline ), allocator, result );
}
template <typename DataType>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<DataType>
Pipeline::getRayTracingShaderGroupHandlesKHR( uint32_t firstGroup, uint32_t groupCount, size_t dataSize ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetRayTracingShaderGroupHandlesKHR &&
"Function <vkGetRayTracingShaderGroupHandlesKHR> requires <VK_KHR_ray_tracing_pipeline> or <VK_NV_ray_tracing>" );
VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 );
std::vector<DataType> data( dataSize / sizeof( DataType ) );
VULKAN_HPP_NAMESPACE::Result result =
static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetRayTracingShaderGroupHandlesKHR( static_cast<VkDevice>( m_device ),
static_cast<VkPipeline>( m_pipeline ),
firstGroup,
groupCount,
data.size() * sizeof( DataType ),
reinterpret_cast<void *>( data.data() ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getRayTracingShaderGroupHandlesKHR" );
return data;
}
template <typename DataType>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE DataType Pipeline::getRayTracingShaderGroupHandleKHR( uint32_t firstGroup, uint32_t groupCount ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetRayTracingShaderGroupHandlesKHR &&
"Function <vkGetRayTracingShaderGroupHandlesKHR> requires <VK_KHR_ray_tracing_pipeline> or <VK_NV_ray_tracing>" );
DataType data;
VULKAN_HPP_NAMESPACE::Result result =
static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetRayTracingShaderGroupHandlesKHR( static_cast<VkDevice>( m_device ),
static_cast<VkPipeline>( m_pipeline ),
firstGroup,
groupCount,
sizeof( DataType ),
reinterpret_cast<void *>( &data ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getRayTracingShaderGroupHandleKHR" );
return data;
}
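    // Usage sketch (illustrative comment, not emitted by the generator): fetching all shader group handles
    // of a ray tracing pipeline for shader binding table construction; `groupCount` comes from the pipeline
    // create info and `handleSize` from the physical device's ray tracing pipeline properties.
    //
    //   std::vector<uint8_t> handles =
    //     rtPipeline.getRayTracingShaderGroupHandlesKHR<uint8_t>( 0, groupCount, size_t( groupCount ) * handleSize );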
template <typename DataType>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<DataType>
Pipeline::getRayTracingCaptureReplayShaderGroupHandlesKHR( uint32_t firstGroup, uint32_t groupCount, size_t dataSize ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetRayTracingCaptureReplayShaderGroupHandlesKHR &&
"Function <vkGetRayTracingCaptureReplayShaderGroupHandlesKHR> requires <VK_KHR_ray_tracing_pipeline>" );
VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 );
std::vector<DataType> data( dataSize / sizeof( DataType ) );
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( static_cast<VkDevice>( m_device ),
static_cast<VkPipeline>( m_pipeline ),
firstGroup,
groupCount,
data.size() * sizeof( DataType ),
reinterpret_cast<void *>( data.data() ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getRayTracingCaptureReplayShaderGroupHandlesKHR" );
2023-03-01 09:17:02 +00:00
return data;
2021-04-15 08:49:54 +00:00
}
template <typename DataType>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE DataType Pipeline::getRayTracingCaptureReplayShaderGroupHandleKHR( uint32_t firstGroup, uint32_t groupCount ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetRayTracingCaptureReplayShaderGroupHandlesKHR &&
"Function <vkGetRayTracingCaptureReplayShaderGroupHandlesKHR> requires <VK_KHR_ray_tracing_pipeline>" );
DataType data;
VULKAN_HPP_NAMESPACE::Result result =
static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( static_cast<VkDevice>( m_device ),
static_cast<VkPipeline>( m_pipeline ),
firstGroup,
groupCount,
sizeof( DataType ),
reinterpret_cast<void *>( &data ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getRayTracingCaptureReplayShaderGroupHandleKHR" );
return data;
}
VULKAN_HPP_INLINE void CommandBuffer::traceRaysIndirectKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & raygenShaderBindingTable,
const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & missShaderBindingTable,
const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & hitShaderBindingTable,
const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & callableShaderBindingTable,
VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdTraceRaysIndirectKHR && "Function <vkCmdTraceRaysIndirectKHR> requires <VK_KHR_ray_tracing_pipeline>" );
getDispatcher()->vkCmdTraceRaysIndirectKHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &raygenShaderBindingTable ),
reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &missShaderBindingTable ),
reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &hitShaderBindingTable ),
reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &callableShaderBindingTable ),
static_cast<VkDeviceAddress>( indirectDeviceAddress ) );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceSize
Pipeline::getRayTracingShaderGroupStackSizeKHR( uint32_t group, VULKAN_HPP_NAMESPACE::ShaderGroupShaderKHR groupShader ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetRayTracingShaderGroupStackSizeKHR &&
"Function <vkGetRayTracingShaderGroupStackSizeKHR> requires <VK_KHR_ray_tracing_pipeline>" );
VkDeviceSize result = getDispatcher()->vkGetRayTracingShaderGroupStackSizeKHR(
static_cast<VkDevice>( m_device ), static_cast<VkPipeline>( m_pipeline ), group, static_cast<VkShaderGroupShaderKHR>( groupShader ) );
return static_cast<VULKAN_HPP_NAMESPACE::DeviceSize>( result );
}
VULKAN_HPP_INLINE void CommandBuffer::setRayTracingPipelineStackSizeKHR( uint32_t pipelineStackSize ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetRayTracingPipelineStackSizeKHR &&
"Function <vkCmdSetRayTracingPipelineStackSizeKHR> requires <VK_KHR_ray_tracing_pipeline>" );
getDispatcher()->vkCmdSetRayTracingPipelineStackSizeKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), pipelineStackSize );
}
//=== VK_KHR_sampler_ycbcr_conversion ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SamplerYcbcrConversion>::Type
Device::createSamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateSamplerYcbcrConversionKHR(
static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkSamplerYcbcrConversion *>( &ycbcrConversion ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
return VULKAN_HPP_UNEXPECTED( result );
# else
VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createSamplerYcbcrConversionKHR" );
# endif
}
return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SamplerYcbcrConversion(
*this, *reinterpret_cast<VkSamplerYcbcrConversion *>( &ycbcrConversion ), allocator );
}
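    // Usage sketch (illustrative comment, not emitted by the generator): creating a RAII Y'CbCr conversion
    // for a multi-planar format; the format, model and range below are placeholders chosen by the caller.
    //
    //   VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo createInfo;
    //   createInfo.format     = VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane420Unorm;
    //   createInfo.ycbcrModel = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eYcbcr709;
    //   createInfo.ycbcrRange = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuNarrow;
    //   VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SamplerYcbcrConversion conversion =
    //     device.createSamplerYcbcrConversionKHR( createInfo );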
VULKAN_HPP_INLINE void
Device::destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkDestroySamplerYcbcrConversionKHR &&
"Function <vkDestroySamplerYcbcrConversionKHR> requires <VK_KHR_sampler_ycbcr_conversion> or <VK_VERSION_1_1>" );
getDispatcher()->vkDestroySamplerYcbcrConversionKHR(
static_cast<VkDevice>( m_device ),
static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
//=== VK_KHR_bind_memory2 ===
VULKAN_HPP_INLINE void
Device::bindBufferMemory2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> const & bindInfos ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkBindBufferMemory2KHR && "Function <vkBindBufferMemory2KHR> requires <VK_KHR_bind_memory2> or <VK_VERSION_1_1>" );
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkBindBufferMemory2KHR(
static_cast<VkDevice>( m_device ), bindInfos.size(), reinterpret_cast<const VkBindBufferMemoryInfo *>( bindInfos.data() ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory2KHR" );
}
VULKAN_HPP_INLINE void
Device::bindImageMemory2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> const & bindInfos ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkBindImageMemory2KHR && "Function <vkBindImageMemory2KHR> requires <VK_KHR_bind_memory2> or <VK_VERSION_1_1>" );
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkBindImageMemory2KHR(
static_cast<VkDevice>( m_device ), bindInfos.size(), reinterpret_cast<const VkBindImageMemoryInfo *>( bindInfos.data() ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory2KHR" );
}
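    // Usage sketch (illustrative comment, not emitted by the generator): batching several image/memory
    // bindings in one call; `bindInfos` is a caller-built std::vector<VULKAN_HPP_NAMESPACE::BindImageMemoryInfo>.
    //
    //   device.bindImageMemory2KHR( bindInfos );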
//=== VK_EXT_image_drm_format_modifier ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT Image::getDrmFormatModifierPropertiesEXT() const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageDrmFormatModifierPropertiesEXT &&
"Function <vkGetImageDrmFormatModifierPropertiesEXT> requires <VK_EXT_image_drm_format_modifier>" );
VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT properties;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetImageDrmFormatModifierPropertiesEXT(
static_cast<VkDevice>( m_device ), static_cast<VkImage>( m_image ), reinterpret_cast<VkImageDrmFormatModifierPropertiesEXT *>( &properties ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Image::getDrmFormatModifierPropertiesEXT" );
return properties;
}
//=== VK_EXT_validation_cache ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ValidationCacheEXT>::Type
Device::createValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateValidationCacheEXT(
static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkValidationCacheCreateInfoEXT *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkValidationCacheEXT *>( &validationCache ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
return VULKAN_HPP_UNEXPECTED( result );
# else
VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createValidationCacheEXT" );
# endif
}
return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ValidationCacheEXT(
*this, *reinterpret_cast<VkValidationCacheEXT *>( &validationCache ), allocator );
}
VULKAN_HPP_INLINE void ValidationCacheEXT::merge( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ValidationCacheEXT> const & srcCaches ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkMergeValidationCachesEXT && "Function <vkMergeValidationCachesEXT> requires <VK_EXT_validation_cache>" );
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkMergeValidationCachesEXT( static_cast<VkDevice>( m_device ),
static_cast<VkValidationCacheEXT>( m_validationCache ),
srcCaches.size(),
reinterpret_cast<const VkValidationCacheEXT *>( srcCaches.data() ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::ValidationCacheEXT::merge" );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<uint8_t> ValidationCacheEXT::getData() const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetValidationCacheDataEXT && "Function <vkGetValidationCacheDataEXT> requires <VK_EXT_validation_cache>" );
std::vector<uint8_t> data;
size_t dataSize;
VULKAN_HPP_NAMESPACE::Result result;
do
{
result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetValidationCacheDataEXT(
static_cast<VkDevice>( m_device ), static_cast<VkValidationCacheEXT>( m_validationCache ), &dataSize, nullptr ) );
if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && dataSize )
{
data.resize( dataSize );
result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetValidationCacheDataEXT(
static_cast<VkDevice>( m_device ), static_cast<VkValidationCacheEXT>( m_validationCache ), &dataSize, reinterpret_cast<void *>( data.data() ) ) );
}
} while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::ValidationCacheEXT::getData" );
VULKAN_HPP_ASSERT( dataSize <= data.size() );
if ( dataSize < data.size() )
{
data.resize( dataSize );
}
return data;
}
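// Usage sketch (illustrative, not part of the generated API; assumes a valid vk::raii::Device `device`
// with VK_EXT_validation_cache enabled):
//
//   vk::ValidationCacheCreateInfoEXT createInfo{};
//   auto cache = device.createValidationCacheEXT( createInfo );  // throws or returns an expected<>, depending on VULKAN_HPP_RAII_NO_EXCEPTIONS
//   std::vector<uint8_t> blob = cache.getData();                 // the two-call size query above is handled internally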
//=== VK_NV_shading_rate_image ===
VULKAN_HPP_INLINE void CommandBuffer::bindShadingRateImageNV( VULKAN_HPP_NAMESPACE::ImageView imageView,
VULKAN_HPP_NAMESPACE::ImageLayout imageLayout ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindShadingRateImageNV && "Function <vkCmdBindShadingRateImageNV> requires <VK_NV_shading_rate_image>" );
getDispatcher()->vkCmdBindShadingRateImageNV(
static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkImageView>( imageView ), static_cast<VkImageLayout>( imageLayout ) );
}
VULKAN_HPP_INLINE void CommandBuffer::setViewportShadingRatePaletteNV(
uint32_t firstViewport,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV> const & shadingRatePalettes ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetViewportShadingRatePaletteNV &&
"Function <vkCmdSetViewportShadingRatePaletteNV> requires <VK_NV_shading_rate_image>" );
getDispatcher()->vkCmdSetViewportShadingRatePaletteNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
firstViewport,
shadingRatePalettes.size(),
reinterpret_cast<const VkShadingRatePaletteNV *>( shadingRatePalettes.data() ) );
}
VULKAN_HPP_INLINE void CommandBuffer::setCoarseSampleOrderNV(
VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV> const & customSampleOrders ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCoarseSampleOrderNV && "Function <vkCmdSetCoarseSampleOrderNV> requires <VK_NV_shading_rate_image>" );
getDispatcher()->vkCmdSetCoarseSampleOrderNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkCoarseSampleOrderTypeNV>( sampleOrderType ),
customSampleOrders.size(),
reinterpret_cast<const VkCoarseSampleOrderCustomNV *>( customSampleOrders.data() ) );
}
//=== VK_NV_ray_tracing ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::AccelerationStructureNV>::Type
Device::createAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateAccelerationStructureNV(
static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkAccelerationStructureCreateInfoNV *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkAccelerationStructureNV *>( &accelerationStructure ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
return VULKAN_HPP_UNEXPECTED( result );
# else
VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createAccelerationStructureNV" );
# endif
}
return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::AccelerationStructureNV(
*this, *reinterpret_cast<VkAccelerationStructureNV *>( &accelerationStructure ), allocator );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR Device::getAccelerationStructureMemoryRequirementsNV(
const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV & info ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetAccelerationStructureMemoryRequirementsNV &&
"Function <vkGetAccelerationStructureMemoryRequirementsNV> requires <VK_NV_ray_tracing>" );
VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR memoryRequirements;
getDispatcher()->vkGetAccelerationStructureMemoryRequirementsNV( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV *>( &info ),
reinterpret_cast<VkMemoryRequirements2KHR *>( &memoryRequirements ) );
return memoryRequirements;
}
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> Device::getAccelerationStructureMemoryRequirementsNV(
const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV & info ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetAccelerationStructureMemoryRequirementsNV &&
"Function <vkGetAccelerationStructureMemoryRequirementsNV> requires <VK_NV_ray_tracing>" );
VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR>();
getDispatcher()->vkGetAccelerationStructureMemoryRequirementsNV( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV *>( &info ),
reinterpret_cast<VkMemoryRequirements2KHR *>( &memoryRequirements ) );
return structureChain;
}
VULKAN_HPP_INLINE void Device::bindAccelerationStructureMemoryNV(
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV> const & bindInfos ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkBindAccelerationStructureMemoryNV &&
"Function <vkBindAccelerationStructureMemoryNV> requires <VK_NV_ray_tracing>" );
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkBindAccelerationStructureMemoryNV(
static_cast<VkDevice>( m_device ), bindInfos.size(), reinterpret_cast<const VkBindAccelerationStructureMemoryInfoNV *>( bindInfos.data() ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindAccelerationStructureMemoryNV" );
}
VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV & info,
VULKAN_HPP_NAMESPACE::Buffer instanceData,
VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset,
VULKAN_HPP_NAMESPACE::Bool32 update,
VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst,
VULKAN_HPP_NAMESPACE::AccelerationStructureNV src,
VULKAN_HPP_NAMESPACE::Buffer scratch,
VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBuildAccelerationStructureNV && "Function <vkCmdBuildAccelerationStructureNV> requires <VK_NV_ray_tracing>" );
getDispatcher()->vkCmdBuildAccelerationStructureNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
reinterpret_cast<const VkAccelerationStructureInfoNV *>( &info ),
static_cast<VkBuffer>( instanceData ),
static_cast<VkDeviceSize>( instanceOffset ),
static_cast<VkBool32>( update ),
static_cast<VkAccelerationStructureNV>( dst ),
static_cast<VkAccelerationStructureNV>( src ),
static_cast<VkBuffer>( scratch ),
static_cast<VkDeviceSize>( scratchOffset ) );
}
VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst,
VULKAN_HPP_NAMESPACE::AccelerationStructureNV src,
VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyAccelerationStructureNV && "Function <vkCmdCopyAccelerationStructureNV> requires <VK_NV_ray_tracing>" );
getDispatcher()->vkCmdCopyAccelerationStructureNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkAccelerationStructureNV>( dst ),
static_cast<VkAccelerationStructureNV>( src ),
static_cast<VkCopyAccelerationStructureModeKHR>( mode ) );
}
VULKAN_HPP_INLINE void CommandBuffer::traceRaysNV( VULKAN_HPP_NAMESPACE::Buffer raygenShaderBindingTableBuffer,
VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderBindingOffset,
VULKAN_HPP_NAMESPACE::Buffer missShaderBindingTableBuffer,
VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingOffset,
VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingStride,
VULKAN_HPP_NAMESPACE::Buffer hitShaderBindingTableBuffer,
VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingOffset,
VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingStride,
VULKAN_HPP_NAMESPACE::Buffer callableShaderBindingTableBuffer,
VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingOffset,
VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingStride,
uint32_t width,
uint32_t height,
uint32_t depth ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdTraceRaysNV && "Function <vkCmdTraceRaysNV> requires <VK_NV_ray_tracing>" );
getDispatcher()->vkCmdTraceRaysNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkBuffer>( raygenShaderBindingTableBuffer ),
static_cast<VkDeviceSize>( raygenShaderBindingOffset ),
static_cast<VkBuffer>( missShaderBindingTableBuffer ),
static_cast<VkDeviceSize>( missShaderBindingOffset ),
static_cast<VkDeviceSize>( missShaderBindingStride ),
static_cast<VkBuffer>( hitShaderBindingTableBuffer ),
static_cast<VkDeviceSize>( hitShaderBindingOffset ),
static_cast<VkDeviceSize>( hitShaderBindingStride ),
static_cast<VkBuffer>( callableShaderBindingTableBuffer ),
static_cast<VkDeviceSize>( callableShaderBindingOffset ),
static_cast<VkDeviceSize>( callableShaderBindingStride ),
width,
height,
depth );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<std::vector<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Pipeline>>::Type
Device::createRayTracingPipelinesNV(
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
{
std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() );
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateRayTracingPipelinesNV(
static_cast<VkDevice>( m_device ),
pipelineCache ? static_cast<VkPipelineCache>( **pipelineCache ) : 0,
createInfos.size(),
reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( createInfos.data() ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
if ( ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) && ( result != VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
{
# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
return VULKAN_HPP_UNEXPECTED( result );
# else
VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createRayTracingPipelinesNV" );
# endif
}
std::vector<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Pipeline> pipelinesRAII;
pipelinesRAII.reserve( pipelines.size() );
for ( auto & pipeline : pipelines )
{
pipelinesRAII.emplace_back( *this, *reinterpret_cast<VkPipeline *>( &pipeline ), allocator, result );
}
return pipelinesRAII;
}
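// Usage sketch (illustrative, not part of the generated API; assumes a valid vk::raii::Device `device`,
// an optional vk::raii::PipelineCache `cache`, filled-in vk::RayTracingPipelineCreateInfoNV structures `infos`,
// and that the RAII Pipeline wrapper exposes the per-pipeline result via getConstructorSuccessCode()):
//
//   std::vector<vk::raii::Pipeline> pipelines = device.createRayTracingPipelinesNV( cache, infos );
//   for ( auto const & p : pipelines )
//     assert( p.getConstructorSuccessCode() == vk::Result::eSuccess );  // ePipelineCompileRequiredEXT is passed through as well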
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Pipeline>::Type
Device::createRayTracingPipelineNV(
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::Pipeline pipeline;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateRayTracingPipelinesNV(
static_cast<VkDevice>( m_device ),
pipelineCache ? static_cast<VkPipelineCache>( **pipelineCache ) : 0,
1,
reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkPipeline *>( &pipeline ) ) );
if ( ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) && ( result != VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
{
# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
return VULKAN_HPP_UNEXPECTED( result );
# else
VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createRayTracingPipelineNV" );
# endif
}
return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Pipeline( *this, *reinterpret_cast<VkPipeline *>( &pipeline ), allocator, result );
}
template <typename DataType>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<DataType>
Pipeline::getRayTracingShaderGroupHandlesNV( uint32_t firstGroup, uint32_t groupCount, size_t dataSize ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetRayTracingShaderGroupHandlesNV &&
"Function <vkGetRayTracingShaderGroupHandlesNV> requires <VK_KHR_ray_tracing_pipeline> or <VK_NV_ray_tracing>" );
VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 );
std::vector<DataType> data( dataSize / sizeof( DataType ) );
VULKAN_HPP_NAMESPACE::Result result =
static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetRayTracingShaderGroupHandlesNV( static_cast<VkDevice>( m_device ),
static_cast<VkPipeline>( m_pipeline ),
firstGroup,
groupCount,
data.size() * sizeof( DataType ),
reinterpret_cast<void *>( data.data() ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getRayTracingShaderGroupHandlesNV" );
return data;
}
template <typename DataType>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE DataType Pipeline::getRayTracingShaderGroupHandleNV( uint32_t firstGroup, uint32_t groupCount ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetRayTracingShaderGroupHandlesNV &&
"Function <vkGetRayTracingShaderGroupHandlesNV> requires <VK_KHR_ray_tracing_pipeline> or <VK_NV_ray_tracing>" );
DataType data;
VULKAN_HPP_NAMESPACE::Result result =
static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetRayTracingShaderGroupHandlesNV( static_cast<VkDevice>( m_device ),
static_cast<VkPipeline>( m_pipeline ),
firstGroup,
groupCount,
sizeof( DataType ),
reinterpret_cast<void *>( &data ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getRayTracingShaderGroupHandleNV" );
return data;
}
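// Usage sketch (illustrative, not part of the generated API; assumes a vk::raii::Pipeline `rtPipeline`
// created by createRayTracingPipelineNV above, a group count `groupCount`, and a handle size `handleSize`
// queried from the physical device's ray-tracing properties):
//
//   std::vector<uint8_t> handles = rtPipeline.getRayTracingShaderGroupHandlesNV<uint8_t>( 0, groupCount, groupCount * handleSize );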
template <typename DataType>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<DataType> AccelerationStructureNV::getHandle( size_t dataSize ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetAccelerationStructureHandleNV && "Function <vkGetAccelerationStructureHandleNV> requires <VK_NV_ray_tracing>" );
VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 );
std::vector<DataType> data( dataSize / sizeof( DataType ) );
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkGetAccelerationStructureHandleNV( static_cast<VkDevice>( m_device ),
static_cast<VkAccelerationStructureNV>( m_accelerationStructure ),
data.size() * sizeof( DataType ),
reinterpret_cast<void *>( data.data() ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::AccelerationStructureNV::getHandle" );
return data;
}
template <typename DataType>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE DataType AccelerationStructureNV::getHandle() const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetAccelerationStructureHandleNV && "Function <vkGetAccelerationStructureHandleNV> requires <VK_NV_ray_tracing>" );
DataType data;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkGetAccelerationStructureHandleNV( static_cast<VkDevice>( m_device ),
static_cast<VkAccelerationStructureNV>( m_accelerationStructure ),
sizeof( DataType ),
reinterpret_cast<void *>( &data ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::AccelerationStructureNV::getHandle" );
return data;
}
VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesNV(
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureNV> const & accelerationStructures,
VULKAN_HPP_NAMESPACE::QueryType queryType,
VULKAN_HPP_NAMESPACE::QueryPool queryPool,
uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdWriteAccelerationStructuresPropertiesNV &&
"Function <vkCmdWriteAccelerationStructuresPropertiesNV> requires <VK_NV_ray_tracing>" );
getDispatcher()->vkCmdWriteAccelerationStructuresPropertiesNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
accelerationStructures.size(),
reinterpret_cast<const VkAccelerationStructureNV *>( accelerationStructures.data() ),
static_cast<VkQueryType>( queryType ),
static_cast<VkQueryPool>( queryPool ),
firstQuery );
}
VULKAN_HPP_INLINE void Pipeline::compileDeferredNV( uint32_t shader ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCompileDeferredNV && "Function <vkCompileDeferredNV> requires <VK_NV_ray_tracing>" );
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkCompileDeferredNV( static_cast<VkDevice>( m_device ), static_cast<VkPipeline>( m_pipeline ), shader ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Pipeline::compileDeferredNV" );
}
//=== VK_KHR_maintenance3 ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport
Device::getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetDescriptorSetLayoutSupportKHR &&
"Function <vkGetDescriptorSetLayoutSupportKHR> requires <VK_KHR_maintenance3> or <VK_VERSION_1_1>" );
VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport support;
getDispatcher()->vkGetDescriptorSetLayoutSupportKHR( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ),
reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) );
return support;
}
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
Device::getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetDescriptorSetLayoutSupportKHR &&
"Function <vkGetDescriptorSetLayoutSupportKHR> requires <VK_KHR_maintenance3> or <VK_VERSION_1_1>" );
VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport & support = structureChain.template get<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport>();
getDispatcher()->vkGetDescriptorSetLayoutSupportKHR( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ),
reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) );
return structureChain;
}
//=== VK_KHR_draw_indirect_count ===
VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer,
VULKAN_HPP_NAMESPACE::DeviceSize offset,
VULKAN_HPP_NAMESPACE::Buffer countBuffer,
VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
uint32_t maxDrawCount,
uint32_t stride ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawIndirectCountKHR &&
"Function <vkCmdDrawIndirectCountKHR> requires <VK_AMD_draw_indirect_count> or <VK_KHR_draw_indirect_count> or <VK_VERSION_1_2>" );
getDispatcher()->vkCmdDrawIndirectCountKHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkBuffer>( buffer ),
static_cast<VkDeviceSize>( offset ),
static_cast<VkBuffer>( countBuffer ),
static_cast<VkDeviceSize>( countBufferOffset ),
maxDrawCount,
stride );
}
VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer,
VULKAN_HPP_NAMESPACE::DeviceSize offset,
VULKAN_HPP_NAMESPACE::Buffer countBuffer,
VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
uint32_t maxDrawCount,
uint32_t stride ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT(
getDispatcher()->vkCmdDrawIndexedIndirectCountKHR &&
"Function <vkCmdDrawIndexedIndirectCountKHR> requires <VK_AMD_draw_indirect_count> or <VK_KHR_draw_indirect_count> or <VK_VERSION_1_2>" );
getDispatcher()->vkCmdDrawIndexedIndirectCountKHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkBuffer>( buffer ),
static_cast<VkDeviceSize>( offset ),
static_cast<VkBuffer>( countBuffer ),
static_cast<VkDeviceSize>( countBufferOffset ),
maxDrawCount,
stride );
}
//=== VK_EXT_external_memory_host ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT
Device::getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, const void * pHostPointer ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetMemoryHostPointerPropertiesEXT &&
"Function <vkGetMemoryHostPointerPropertiesEXT> requires <VK_EXT_external_memory_host>" );
VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT memoryHostPointerProperties;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkGetMemoryHostPointerPropertiesEXT( static_cast<VkDevice>( m_device ),
static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
pHostPointer,
reinterpret_cast<VkMemoryHostPointerPropertiesEXT *>( &memoryHostPointerProperties ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryHostPointerPropertiesEXT" );
return memoryHostPointerProperties;
}
//=== VK_AMD_buffer_marker ===
VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarkerAMD( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage,
VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,
uint32_t marker ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdWriteBufferMarkerAMD && "Function <vkCmdWriteBufferMarkerAMD> requires <VK_AMD_buffer_marker>" );
getDispatcher()->vkCmdWriteBufferMarkerAMD( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkPipelineStageFlagBits>( pipelineStage ),
static_cast<VkBuffer>( dstBuffer ),
static_cast<VkDeviceSize>( dstOffset ),
marker );
}
VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarker2AMD( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage,
VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,
uint32_t marker ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdWriteBufferMarker2AMD && "Function <vkCmdWriteBufferMarker2AMD> requires <VK_AMD_buffer_marker>" );
getDispatcher()->vkCmdWriteBufferMarker2AMD( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkPipelineStageFlags2>( stage ),
static_cast<VkBuffer>( dstBuffer ),
static_cast<VkDeviceSize>( dstOffset ),
marker );
}
//=== VK_EXT_calibrated_timestamps ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::TimeDomainKHR> PhysicalDevice::getCalibrateableTimeDomainsEXT() const
{
VULKAN_HPP_ASSERT(
getDispatcher()->vkGetPhysicalDeviceCalibrateableTimeDomainsEXT &&
"Function <vkGetPhysicalDeviceCalibrateableTimeDomainsEXT> requires <VK_EXT_calibrated_timestamps> or <VK_KHR_calibrated_timestamps>" );
std::vector<VULKAN_HPP_NAMESPACE::TimeDomainKHR> timeDomains;
uint32_t timeDomainCount;
VULKAN_HPP_NAMESPACE::Result result;
do
{
result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( static_cast<VkPhysicalDevice>( m_physicalDevice ), &timeDomainCount, nullptr ) );
if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && timeDomainCount )
{
timeDomains.resize( timeDomainCount );
result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetPhysicalDeviceCalibrateableTimeDomainsEXT(
static_cast<VkPhysicalDevice>( m_physicalDevice ), &timeDomainCount, reinterpret_cast<VkTimeDomainKHR *>( timeDomains.data() ) ) );
}
} while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCalibrateableTimeDomainsEXT" );
VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() );
if ( timeDomainCount < timeDomains.size() )
{
timeDomains.resize( timeDomainCount );
}
return timeDomains;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::pair<std::vector<uint64_t>, uint64_t> Device::getCalibratedTimestampsEXT(
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR> const & timestampInfos ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetCalibratedTimestampsEXT &&
"Function <vkGetCalibratedTimestampsEXT> requires <VK_EXT_calibrated_timestamps> or <VK_KHR_calibrated_timestamps>" );
std::pair<std::vector<uint64_t>, uint64_t> data_( std::piecewise_construct, std::forward_as_tuple( timestampInfos.size() ), std::forward_as_tuple( 0 ) );
std::vector<uint64_t> & timestamps = data_.first;
uint64_t & maxDeviation = data_.second;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkGetCalibratedTimestampsEXT( static_cast<VkDevice>( m_device ),
timestampInfos.size(),
reinterpret_cast<const VkCalibratedTimestampInfoKHR *>( timestampInfos.data() ),
timestamps.data(),
&maxDeviation ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsEXT" );
return data_;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::pair<uint64_t, uint64_t>
Device::getCalibratedTimestampEXT( const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR & timestampInfo ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetCalibratedTimestampsEXT &&
"Function <vkGetCalibratedTimestampsEXT> requires <VK_EXT_calibrated_timestamps> or <VK_KHR_calibrated_timestamps>" );
std::pair<uint64_t, uint64_t> data_;
uint64_t & timestamp = data_.first;
uint64_t & maxDeviation = data_.second;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetCalibratedTimestampsEXT(
static_cast<VkDevice>( m_device ), 1, reinterpret_cast<const VkCalibratedTimestampInfoKHR *>( &timestampInfo ), &timestamp, &maxDeviation ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampEXT" );
return data_;
}
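// Usage sketch (illustrative, not part of the generated API; assumes a valid vk::raii::Device `device`
// whose physical device reports both time domains via getCalibrateableTimeDomainsEXT above):
//
//   std::array<vk::CalibratedTimestampInfoKHR, 2> infos = { vk::CalibratedTimestampInfoKHR{ vk::TimeDomainKHR::eDevice },
//                                                           vk::CalibratedTimestampInfoKHR{ vk::TimeDomainKHR::eClockMonotonic } };
//   auto [timestamps, maxDeviation] = device.getCalibratedTimestampsEXT( infos );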
//=== VK_NV_mesh_shader ===
VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksNV( uint32_t taskCount, uint32_t firstTask ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawMeshTasksNV && "Function <vkCmdDrawMeshTasksNV> requires <VK_NV_mesh_shader>" );
getDispatcher()->vkCmdDrawMeshTasksNV( static_cast<VkCommandBuffer>( m_commandBuffer ), taskCount, firstTask );
}
VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectNV( VULKAN_HPP_NAMESPACE::Buffer buffer,
VULKAN_HPP_NAMESPACE::DeviceSize offset,
uint32_t drawCount,
uint32_t stride ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawMeshTasksIndirectNV && "Function <vkCmdDrawMeshTasksIndirectNV> requires <VK_NV_mesh_shader>" );
getDispatcher()->vkCmdDrawMeshTasksIndirectNV(
static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
}
VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectCountNV( VULKAN_HPP_NAMESPACE::Buffer buffer,
VULKAN_HPP_NAMESPACE::DeviceSize offset,
VULKAN_HPP_NAMESPACE::Buffer countBuffer,
VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
uint32_t maxDrawCount,
uint32_t stride ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawMeshTasksIndirectCountNV && "Function <vkCmdDrawMeshTasksIndirectCountNV> requires <VK_NV_mesh_shader>" );
getDispatcher()->vkCmdDrawMeshTasksIndirectCountNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkBuffer>( buffer ),
static_cast<VkDeviceSize>( offset ),
static_cast<VkBuffer>( countBuffer ),
static_cast<VkDeviceSize>( countBufferOffset ),
maxDrawCount,
stride );
}
//=== VK_NV_scissor_exclusive ===
VULKAN_HPP_INLINE void CommandBuffer::setExclusiveScissorEnableNV(
uint32_t firstExclusiveScissor,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Bool32> const & exclusiveScissorEnables ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetExclusiveScissorEnableNV &&
"Function <vkCmdSetExclusiveScissorEnableNV> requires <VK_NV_scissor_exclusive>" );
getDispatcher()->vkCmdSetExclusiveScissorEnableNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
firstExclusiveScissor,
exclusiveScissorEnables.size(),
reinterpret_cast<const VkBool32 *>( exclusiveScissorEnables.data() ) );
}
VULKAN_HPP_INLINE void CommandBuffer::setExclusiveScissorNV(
uint32_t firstExclusiveScissor, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & exclusiveScissors ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetExclusiveScissorNV && "Function <vkCmdSetExclusiveScissorNV> requires <VK_NV_scissor_exclusive>" );
getDispatcher()->vkCmdSetExclusiveScissorNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
firstExclusiveScissor,
exclusiveScissors.size(),
reinterpret_cast<const VkRect2D *>( exclusiveScissors.data() ) );
}
//=== VK_NV_device_diagnostic_checkpoints ===
template <typename CheckpointMarkerType>
VULKAN_HPP_INLINE void CommandBuffer::setCheckpointNV( CheckpointMarkerType const & checkpointMarker ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCheckpointNV && "Function <vkCmdSetCheckpointNV> requires <VK_NV_device_diagnostic_checkpoints>" );
getDispatcher()->vkCmdSetCheckpointNV( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const void *>( &checkpointMarker ) );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::CheckpointDataNV> Queue::getCheckpointDataNV() const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetQueueCheckpointDataNV &&
"Function <vkGetQueueCheckpointDataNV> requires <VK_NV_device_diagnostic_checkpoints>" );
std::vector<VULKAN_HPP_NAMESPACE::CheckpointDataNV> checkpointData;
uint32_t checkpointDataCount;
getDispatcher()->vkGetQueueCheckpointDataNV( static_cast<VkQueue>( m_queue ), &checkpointDataCount, nullptr );
checkpointData.resize( checkpointDataCount );
getDispatcher()->vkGetQueueCheckpointDataNV(
static_cast<VkQueue>( m_queue ), &checkpointDataCount, reinterpret_cast<VkCheckpointDataNV *>( checkpointData.data() ) );
VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() );
if ( checkpointDataCount < checkpointData.size() )
{
checkpointData.resize( checkpointDataCount );
}
return checkpointData;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::CheckpointData2NV> Queue::getCheckpointData2NV() const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetQueueCheckpointData2NV &&
"Function <vkGetQueueCheckpointData2NV> requires <VK_NV_device_diagnostic_checkpoints>" );
std::vector<VULKAN_HPP_NAMESPACE::CheckpointData2NV> checkpointData;
uint32_t checkpointDataCount;
getDispatcher()->vkGetQueueCheckpointData2NV( static_cast<VkQueue>( m_queue ), &checkpointDataCount, nullptr );
checkpointData.resize( checkpointDataCount );
getDispatcher()->vkGetQueueCheckpointData2NV(
static_cast<VkQueue>( m_queue ), &checkpointDataCount, reinterpret_cast<VkCheckpointData2NV *>( checkpointData.data() ) );
VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() );
if ( checkpointDataCount < checkpointData.size() )
{
checkpointData.resize( checkpointDataCount );
}
return checkpointData;
}
//=== VK_KHR_timeline_semaphore ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t Semaphore::getCounterValueKHR() const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetSemaphoreCounterValueKHR &&
"Function <vkGetSemaphoreCounterValueKHR> requires <VK_KHR_timeline_semaphore> or <VK_VERSION_1_2>" );
uint64_t value;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkGetSemaphoreCounterValueKHR( static_cast<VkDevice>( m_device ), static_cast<VkSemaphore>( m_semaphore ), &value ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Semaphore::getCounterValueKHR" );
return value;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::waitSemaphoresKHR( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & waitInfo,
uint64_t timeout ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkWaitSemaphoresKHR && "Function <vkWaitSemaphoresKHR> requires <VK_KHR_timeline_semaphore> or <VK_VERSION_1_2>" );
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkWaitSemaphoresKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkSemaphoreWaitInfo *>( &waitInfo ), timeout ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck(
result, VULKAN_HPP_NAMESPACE_STRING "::Device::waitSemaphoresKHR", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } );
return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
}
VULKAN_HPP_INLINE void Device::signalSemaphoreKHR( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo & signalInfo ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkSignalSemaphoreKHR && "Function <vkSignalSemaphoreKHR> requires <VK_KHR_timeline_semaphore> or <VK_VERSION_1_2>" );
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkSignalSemaphoreKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkSemaphoreSignalInfo *>( &signalInfo ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::signalSemaphoreKHR" );
}
//=== VK_INTEL_performance_query ===
VULKAN_HPP_INLINE void Device::initializePerformanceApiINTEL( const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL & initializeInfo ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkInitializePerformanceApiINTEL &&
"Function <vkInitializePerformanceApiINTEL> requires <VK_INTEL_performance_query>" );
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkInitializePerformanceApiINTEL(
static_cast<VkDevice>( m_device ), reinterpret_cast<const VkInitializePerformanceApiInfoINTEL *>( &initializeInfo ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::initializePerformanceApiINTEL" );
}
VULKAN_HPP_INLINE void Device::uninitializePerformanceApiINTEL() const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkUninitializePerformanceApiINTEL &&
"Function <vkUninitializePerformanceApiINTEL> requires <VK_INTEL_performance_query>" );
getDispatcher()->vkUninitializePerformanceApiINTEL( static_cast<VkDevice>( m_device ) );
}
VULKAN_HPP_INLINE void CommandBuffer::setPerformanceMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL & markerInfo ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetPerformanceMarkerINTEL && "Function <vkCmdSetPerformanceMarkerINTEL> requires <VK_INTEL_performance_query>" );
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCmdSetPerformanceMarkerINTEL(
static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkPerformanceMarkerInfoINTEL *>( &markerInfo ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceMarkerINTEL" );
}
VULKAN_HPP_INLINE void CommandBuffer::setPerformanceStreamMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL & markerInfo ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetPerformanceStreamMarkerINTEL &&
"Function <vkCmdSetPerformanceStreamMarkerINTEL> requires <VK_INTEL_performance_query>" );
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCmdSetPerformanceStreamMarkerINTEL(
static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkPerformanceStreamMarkerInfoINTEL *>( &markerInfo ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceStreamMarkerINTEL" );
}
VULKAN_HPP_INLINE void CommandBuffer::setPerformanceOverrideINTEL( const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL & overrideInfo ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetPerformanceOverrideINTEL &&
"Function <vkCmdSetPerformanceOverrideINTEL> requires <VK_INTEL_performance_query>" );
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCmdSetPerformanceOverrideINTEL(
static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkPerformanceOverrideInfoINTEL *>( &overrideInfo ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceOverrideINTEL" );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PerformanceConfigurationINTEL>::Type
Device::acquirePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL const & acquireInfo ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkAcquirePerformanceConfigurationINTEL( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkPerformanceConfigurationAcquireInfoINTEL *>( &acquireInfo ),
reinterpret_cast<VkPerformanceConfigurationINTEL *>( &configuration ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
return VULKAN_HPP_UNEXPECTED( result );
# else
VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::acquirePerformanceConfigurationINTEL" );
# endif
}
return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PerformanceConfigurationINTEL(
*this, *reinterpret_cast<VkPerformanceConfigurationINTEL *>( &configuration ) );
}
VULKAN_HPP_INLINE void Queue::setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkQueueSetPerformanceConfigurationINTEL &&
"Function <vkQueueSetPerformanceConfigurationINTEL> requires <VK_INTEL_performance_query>" );
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkQueueSetPerformanceConfigurationINTEL(
static_cast<VkQueue>( m_queue ), static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::setPerformanceConfigurationINTEL" );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PerformanceValueINTEL
Device::getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetPerformanceParameterINTEL && "Function <vkGetPerformanceParameterINTEL> requires <VK_INTEL_performance_query>" );
VULKAN_HPP_NAMESPACE::PerformanceValueINTEL value;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetPerformanceParameterINTEL(
static_cast<VkDevice>( m_device ), static_cast<VkPerformanceParameterTypeINTEL>( parameter ), reinterpret_cast<VkPerformanceValueINTEL *>( &value ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPerformanceParameterINTEL" );
return value;
}
//=== VK_AMD_display_native_hdr ===
VULKAN_HPP_INLINE void SwapchainKHR::setLocalDimmingAMD( VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkSetLocalDimmingAMD && "Function <vkSetLocalDimmingAMD> requires <VK_AMD_display_native_hdr>" );
getDispatcher()->vkSetLocalDimmingAMD(
static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( m_swapchain ), static_cast<VkBool32>( localDimmingEnable ) );
}
# if defined( VK_USE_PLATFORM_FUCHSIA )
//=== VK_FUCHSIA_imagepipe_surface ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR>::Type
Instance::createImagePipeSurfaceFUCHSIA( VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateImagePipeSurfaceFUCHSIA(
static_cast<VkInstance>( m_instance ),
reinterpret_cast<const VkImagePipeSurfaceCreateInfoFUCHSIA *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
return VULKAN_HPP_UNEXPECTED( result );
# else
VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Instance::createImagePipeSurfaceFUCHSIA" );
# endif
}
return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, *reinterpret_cast<VkSurfaceKHR *>( &surface ), allocator );
}
# endif /*VK_USE_PLATFORM_FUCHSIA*/
# if defined( VK_USE_PLATFORM_METAL_EXT )
//=== VK_EXT_metal_surface ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR>::Type
Instance::createMetalSurfaceEXT( VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateMetalSurfaceEXT(
static_cast<VkInstance>( m_instance ),
reinterpret_cast<const VkMetalSurfaceCreateInfoEXT *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
return VULKAN_HPP_UNEXPECTED( result );
# else
VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Instance::createMetalSurfaceEXT" );
# endif
}
return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, *reinterpret_cast<VkSurfaceKHR *>( &surface ), allocator );
}
# endif /*VK_USE_PLATFORM_METAL_EXT*/
//=== VK_KHR_fragment_shading_rate ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR>
PhysicalDevice::getFragmentShadingRatesKHR() const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceFragmentShadingRatesKHR &&
"Function <vkGetPhysicalDeviceFragmentShadingRatesKHR> requires <VK_KHR_fragment_shading_rate>" );
std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR> fragmentShadingRates;
uint32_t fragmentShadingRateCount;
VULKAN_HPP_NAMESPACE::Result result;
do
{
result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetPhysicalDeviceFragmentShadingRatesKHR(
static_cast<VkPhysicalDevice>( m_physicalDevice ), &fragmentShadingRateCount, nullptr ) );
if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && fragmentShadingRateCount )
{
fragmentShadingRates.resize( fragmentShadingRateCount );
result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetPhysicalDeviceFragmentShadingRatesKHR(
static_cast<VkPhysicalDevice>( m_physicalDevice ),
&fragmentShadingRateCount,
reinterpret_cast<VkPhysicalDeviceFragmentShadingRateKHR *>( fragmentShadingRates.data() ) ) );
}
} while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getFragmentShadingRatesKHR" );
VULKAN_HPP_ASSERT( fragmentShadingRateCount <= fragmentShadingRates.size() );
if ( fragmentShadingRateCount < fragmentShadingRates.size() )
{
fragmentShadingRates.resize( fragmentShadingRateCount );
}
return fragmentShadingRates;
}
VULKAN_HPP_INLINE void
CommandBuffer::setFragmentShadingRateKHR( const VULKAN_HPP_NAMESPACE::Extent2D & fragmentSize,
const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2] ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetFragmentShadingRateKHR &&
"Function <vkCmdSetFragmentShadingRateKHR> requires <VK_KHR_fragment_shading_rate>" );
getDispatcher()->vkCmdSetFragmentShadingRateKHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
reinterpret_cast<const VkExtent2D *>( &fragmentSize ),
reinterpret_cast<const VkFragmentShadingRateCombinerOpKHR *>( combinerOps ) );
}
//=== VK_KHR_dynamic_rendering_local_read ===
VULKAN_HPP_INLINE void CommandBuffer::setRenderingAttachmentLocationsKHR(
const VULKAN_HPP_NAMESPACE::RenderingAttachmentLocationInfoKHR & locationInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetRenderingAttachmentLocationsKHR &&
"Function <vkCmdSetRenderingAttachmentLocationsKHR> requires <VK_KHR_dynamic_rendering_local_read>" );
getDispatcher()->vkCmdSetRenderingAttachmentLocationsKHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
reinterpret_cast<const VkRenderingAttachmentLocationInfoKHR *>( &locationInfo ) );
}
VULKAN_HPP_INLINE void CommandBuffer::setRenderingInputAttachmentIndicesKHR(
const VULKAN_HPP_NAMESPACE::RenderingInputAttachmentIndexInfoKHR & inputAttachmentIndexInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetRenderingInputAttachmentIndicesKHR &&
"Function <vkCmdSetRenderingInputAttachmentIndicesKHR> requires <VK_KHR_dynamic_rendering_local_read>" );
getDispatcher()->vkCmdSetRenderingInputAttachmentIndicesKHR(
static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkRenderingInputAttachmentIndexInfoKHR *>( &inputAttachmentIndexInfo ) );
}
//=== VK_EXT_buffer_device_address ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress
Device::getBufferAddressEXT( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT(
getDispatcher()->vkGetBufferDeviceAddressEXT &&
"Function <vkGetBufferDeviceAddressEXT> requires <VK_EXT_buffer_device_address> or <VK_KHR_buffer_device_address> or <VK_VERSION_1_2>" );
VkDeviceAddress result =
getDispatcher()->vkGetBufferDeviceAddressEXT( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
return static_cast<VULKAN_HPP_NAMESPACE::DeviceAddress>( result );
}
//=== VK_EXT_tooling_info ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties> PhysicalDevice::getToolPropertiesEXT() const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceToolPropertiesEXT &&
"Function <vkGetPhysicalDeviceToolPropertiesEXT> requires <VK_EXT_tooling_info> or <VK_VERSION_1_3>" );
std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties> toolProperties;
uint32_t toolCount;
VULKAN_HPP_NAMESPACE::Result result;
do
{
result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkGetPhysicalDeviceToolPropertiesEXT( static_cast<VkPhysicalDevice>( m_physicalDevice ), &toolCount, nullptr ) );
if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && toolCount )
{
toolProperties.resize( toolCount );
result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetPhysicalDeviceToolPropertiesEXT(
static_cast<VkPhysicalDevice>( m_physicalDevice ), &toolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( toolProperties.data() ) ) );
}
} while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolPropertiesEXT" );
VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() );
if ( toolCount < toolProperties.size() )
{
toolProperties.resize( toolCount );
}
return toolProperties;
}
//=== VK_KHR_present_wait ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result SwapchainKHR::waitForPresent( uint64_t presentId, uint64_t timeout ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkWaitForPresentKHR && "Function <vkWaitForPresentKHR> requires <VK_KHR_present_wait>" );
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkWaitForPresentKHR( static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( m_swapchain ), presentId, timeout ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck(
result,
VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::waitForPresent",
{ VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } );
return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
}
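// Usage sketch (illustrative, not part of the generated API; assumes a valid vk::raii::SwapchainKHR `swapchain`
// and that the matching vkQueuePresentKHR submission carried presentId 42 via VkPresentIdKHR):
//
//   vk::Result r = swapchain.waitForPresent( 42, 1'000'000'000 );  // 1 s timeout; eTimeout and eSuboptimalKHR are non-throwing results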
//=== VK_NV_cooperative_matrix ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV>
PhysicalDevice::getCooperativeMatrixPropertiesNV() const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceCooperativeMatrixPropertiesNV &&
"Function <vkGetPhysicalDeviceCooperativeMatrixPropertiesNV> requires <VK_NV_cooperative_matrix>" );
std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV> properties;
uint32_t propertyCount;
VULKAN_HPP_NAMESPACE::Result result;
do
{
result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( static_cast<VkPhysicalDevice>( m_physicalDevice ), &propertyCount, nullptr ) );
if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount )
{
properties.resize( propertyCount );
result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetPhysicalDeviceCooperativeMatrixPropertiesNV(
static_cast<VkPhysicalDevice>( m_physicalDevice ), &propertyCount, reinterpret_cast<VkCooperativeMatrixPropertiesNV *>( properties.data() ) ) );
}
} while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixPropertiesNV" );
VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
if ( propertyCount < properties.size() )
{
properties.resize( propertyCount );
}
return properties;
}
//=== VK_NV_coverage_reduction_mode ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV>
PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV() const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV &&
"Function <vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV> requires <VK_NV_coverage_reduction_mode>" );
std::vector<VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV> combinations;
uint32_t combinationCount;
VULKAN_HPP_NAMESPACE::Result result;
do
{
result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(
static_cast<VkPhysicalDevice>( m_physicalDevice ), &combinationCount, nullptr ) );
if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && combinationCount )
{
combinations.resize( combinationCount );
result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(
static_cast<VkPhysicalDevice>( m_physicalDevice ),
&combinationCount,
reinterpret_cast<VkFramebufferMixedSamplesCombinationNV *>( combinations.data() ) ) );
}
} while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV" );
VULKAN_HPP_ASSERT( combinationCount <= combinations.size() );
if ( combinationCount < combinations.size() )
{
combinations.resize( combinationCount );
}
return combinations;
}
# if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_EXT_full_screen_exclusive ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR>
PhysicalDevice::getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSurfacePresentModes2EXT &&
"Function <vkGetPhysicalDeviceSurfacePresentModes2EXT> requires <VK_EXT_full_screen_exclusive>" );
std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR> presentModes;
uint32_t presentModeCount;
VULKAN_HPP_NAMESPACE::Result result;
do
{
result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkGetPhysicalDeviceSurfacePresentModes2EXT( static_cast<VkPhysicalDevice>( m_physicalDevice ),
reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
&presentModeCount,
nullptr ) );
if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && presentModeCount )
{
presentModes.resize( presentModeCount );
result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkGetPhysicalDeviceSurfacePresentModes2EXT( static_cast<VkPhysicalDevice>( m_physicalDevice ),
reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
&presentModeCount,
reinterpret_cast<VkPresentModeKHR *>( presentModes.data() ) ) );
}
} while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModes2EXT" );
VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() );
if ( presentModeCount < presentModes.size() )
{
presentModes.resize( presentModeCount );
}
return presentModes;
}
VULKAN_HPP_INLINE void SwapchainKHR::acquireFullScreenExclusiveModeEXT() const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkAcquireFullScreenExclusiveModeEXT &&
"Function <vkAcquireFullScreenExclusiveModeEXT> requires <VK_EXT_full_screen_exclusive>" );
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkAcquireFullScreenExclusiveModeEXT( static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( m_swapchain ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::acquireFullScreenExclusiveModeEXT" );
}
VULKAN_HPP_INLINE void SwapchainKHR::releaseFullScreenExclusiveModeEXT() const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkReleaseFullScreenExclusiveModeEXT &&
"Function <vkReleaseFullScreenExclusiveModeEXT> requires <VK_EXT_full_screen_exclusive>" );
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkReleaseFullScreenExclusiveModeEXT( static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( m_swapchain ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::releaseFullScreenExclusiveModeEXT" );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR
Device::getGroupSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceGroupSurfacePresentModes2EXT &&
"Function <vkGetDeviceGroupSurfacePresentModes2EXT> requires <VK_EXT_full_screen_exclusive>" );
VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkGetDeviceGroupSurfacePresentModes2EXT( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( &modes ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupSurfacePresentModes2EXT" );
return modes;
}
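// Usage sketch (illustrative only; assumes a vk::raii::SwapchainKHR named `swapchain` whose create
// info chained a VkSurfaceFullScreenExclusiveInfoEXT with application-controlled mode, and that
// VK_EXT_full_screen_exclusive is enabled):
//   swapchain.acquireFullScreenExclusiveModeEXT();   // throws vk::SystemError on failure
//   // ... present while holding exclusive mode ...
//   swapchain.releaseFullScreenExclusiveModeEXT();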
# endif /*VK_USE_PLATFORM_WIN32_KHR*/
//=== VK_EXT_headless_surface ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR>::Type
Instance::createHeadlessSurfaceEXT( VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateHeadlessSurfaceEXT(
static_cast<VkInstance>( m_instance ),
reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
return VULKAN_HPP_UNEXPECTED( result );
# else
VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Instance::createHeadlessSurfaceEXT" );
# endif
}
return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, *reinterpret_cast<VkSurfaceKHR *>( &surface ), allocator );
}
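// Usage sketch (illustrative only; assumes a vk::raii::Instance named `instance` created with
// VK_EXT_headless_surface enabled):
//   vk::HeadlessSurfaceCreateInfoEXT createInfo{};
//   vk::raii::SurfaceKHR surface = instance.createHeadlessSurfaceEXT( createInfo );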
//=== VK_KHR_buffer_device_address ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress
Device::getBufferAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT(
getDispatcher()->vkGetBufferDeviceAddressKHR &&
"Function <vkGetBufferDeviceAddressKHR> requires <VK_EXT_buffer_device_address> or <VK_KHR_buffer_device_address> or <VK_VERSION_1_2>" );
VkDeviceAddress result =
getDispatcher()->vkGetBufferDeviceAddressKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
return static_cast<VULKAN_HPP_NAMESPACE::DeviceAddress>( result );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t
Device::getBufferOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetBufferOpaqueCaptureAddressKHR &&
"Function <vkGetBufferOpaqueCaptureAddressKHR> requires <VK_KHR_buffer_device_address> or <VK_VERSION_1_2>" );
uint64_t result =
getDispatcher()->vkGetBufferOpaqueCaptureAddressKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
return result;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t
Device::getMemoryOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo & info ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceMemoryOpaqueCaptureAddressKHR &&
"Function <vkGetDeviceMemoryOpaqueCaptureAddressKHR> requires <VK_KHR_buffer_device_address> or <VK_VERSION_1_2>" );
uint64_t result = getDispatcher()->vkGetDeviceMemoryOpaqueCaptureAddressKHR( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( &info ) );
return result;
}
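// Usage sketch (illustrative only; assumes a vk::raii::Device named `device`, a vk::raii::Buffer
// named `buffer` created with vk::BufferUsageFlagBits::eShaderDeviceAddress, and
// VK_KHR_buffer_device_address enabled):
//   vk::BufferDeviceAddressInfo info( *buffer );
//   vk::DeviceAddress address = device.getBufferAddressKHR( info );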
//=== VK_EXT_line_rasterization ===
VULKAN_HPP_INLINE void CommandBuffer::setLineStippleEXT( uint32_t lineStippleFactor, uint16_t lineStipplePattern ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetLineStippleEXT &&
"Function <vkCmdSetLineStippleEXT> requires <VK_EXT_line_rasterization> or <VK_KHR_line_rasterization>" );
getDispatcher()->vkCmdSetLineStippleEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), lineStippleFactor, lineStipplePattern );
}
//=== VK_EXT_host_query_reset ===
VULKAN_HPP_INLINE void QueryPool::resetEXT( uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkResetQueryPoolEXT && "Function <vkResetQueryPoolEXT> requires <VK_EXT_host_query_reset> or <VK_VERSION_1_2>" );
getDispatcher()->vkResetQueryPoolEXT( static_cast<VkDevice>( m_device ), static_cast<VkQueryPool>( m_queryPool ), firstQuery, queryCount );
}
//=== VK_EXT_extended_dynamic_state ===
VULKAN_HPP_INLINE void CommandBuffer::setCullModeEXT( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCullModeEXT &&
"Function <vkCmdSetCullModeEXT> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetCullModeEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkCullModeFlags>( cullMode ) );
}
VULKAN_HPP_INLINE void CommandBuffer::setFrontFaceEXT( VULKAN_HPP_NAMESPACE::FrontFace frontFace ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetFrontFaceEXT &&
"Function <vkCmdSetFrontFaceEXT> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetFrontFaceEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkFrontFace>( frontFace ) );
}
VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveTopologyEXT( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetPrimitiveTopologyEXT &&
"Function <vkCmdSetPrimitiveTopologyEXT> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetPrimitiveTopologyEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPrimitiveTopology>( primitiveTopology ) );
}
VULKAN_HPP_INLINE void CommandBuffer::setViewportWithCountEXT(
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetViewportWithCountEXT &&
"Function <vkCmdSetViewportWithCountEXT> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetViewportWithCountEXT(
static_cast<VkCommandBuffer>( m_commandBuffer ), viewports.size(), reinterpret_cast<const VkViewport *>( viewports.data() ) );
}
VULKAN_HPP_INLINE void
CommandBuffer::setScissorWithCountEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetScissorWithCountEXT &&
"Function <vkCmdSetScissorWithCountEXT> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetScissorWithCountEXT(
static_cast<VkCommandBuffer>( m_commandBuffer ), scissors.size(), reinterpret_cast<const VkRect2D *>( scissors.data() ) );
}
VULKAN_HPP_INLINE void
CommandBuffer::bindVertexBuffers2EXT( uint32_t firstBinding,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & strides ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindVertexBuffers2EXT &&
"Function <vkCmdBindVertexBuffers2EXT> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
# ifdef VULKAN_HPP_NO_EXCEPTIONS
VULKAN_HPP_ASSERT( buffers.size() == offsets.size() );
VULKAN_HPP_ASSERT( sizes.empty() || buffers.size() == sizes.size() );
VULKAN_HPP_ASSERT( strides.empty() || buffers.size() == strides.size() );
# else
if ( buffers.size() != offsets.size() )
{
throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != offsets.size()" );
}
if ( !sizes.empty() && buffers.size() != sizes.size() )
{
throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != sizes.size()" );
}
if ( !strides.empty() && buffers.size() != strides.size() )
{
throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != strides.size()" );
}
# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
getDispatcher()->vkCmdBindVertexBuffers2EXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
firstBinding,
buffers.size(),
reinterpret_cast<const VkBuffer *>( buffers.data() ),
reinterpret_cast<const VkDeviceSize *>( offsets.data() ),
reinterpret_cast<const VkDeviceSize *>( sizes.data() ),
reinterpret_cast<const VkDeviceSize *>( strides.data() ) );
}
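// Usage sketch (illustrative only; assumes a recording vk::raii::CommandBuffer named
// `commandBuffer` and two vk::Buffer handles `vertexBuffer0` / `vertexBuffer1`):
//   std::array<vk::Buffer, 2>     buffers = { vertexBuffer0, vertexBuffer1 };
//   std::array<vk::DeviceSize, 2> offsets = { 0, 0 };
//   commandBuffer.bindVertexBuffers2EXT( 0, buffers, offsets, {}, {} );  // sizes and strides may stay empty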
VULKAN_HPP_INLINE void CommandBuffer::setDepthTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthTestEnableEXT &&
"Function <vkCmdSetDepthTestEnableEXT> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetDepthTestEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthTestEnable ) );
}
VULKAN_HPP_INLINE void CommandBuffer::setDepthWriteEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthWriteEnableEXT &&
"Function <vkCmdSetDepthWriteEnableEXT> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetDepthWriteEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthWriteEnable ) );
}
VULKAN_HPP_INLINE void CommandBuffer::setDepthCompareOpEXT( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthCompareOpEXT &&
"Function <vkCmdSetDepthCompareOpEXT> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetDepthCompareOpEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkCompareOp>( depthCompareOp ) );
}
VULKAN_HPP_INLINE void CommandBuffer::setDepthBoundsTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthBoundsTestEnableEXT &&
"Function <vkCmdSetDepthBoundsTestEnableEXT> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetDepthBoundsTestEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthBoundsTestEnable ) );
}
VULKAN_HPP_INLINE void CommandBuffer::setStencilTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetStencilTestEnableEXT &&
"Function <vkCmdSetStencilTestEnableEXT> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetStencilTestEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( stencilTestEnable ) );
}
VULKAN_HPP_INLINE void CommandBuffer::setStencilOpEXT( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask,
VULKAN_HPP_NAMESPACE::StencilOp failOp,
VULKAN_HPP_NAMESPACE::StencilOp passOp,
VULKAN_HPP_NAMESPACE::StencilOp depthFailOp,
VULKAN_HPP_NAMESPACE::CompareOp compareOp ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetStencilOpEXT &&
"Function <vkCmdSetStencilOpEXT> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetStencilOpEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkStencilFaceFlags>( faceMask ),
static_cast<VkStencilOp>( failOp ),
static_cast<VkStencilOp>( passOp ),
static_cast<VkStencilOp>( depthFailOp ),
static_cast<VkCompareOp>( compareOp ) );
}
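// Usage sketch (illustrative only; the bound pipeline must list the matching dynamic states, e.g.
// vk::DynamicState::eCullModeEXT, and `commandBuffer` must be in the recording state):
//   commandBuffer.setCullModeEXT( vk::CullModeFlagBits::eBack );
//   commandBuffer.setFrontFaceEXT( vk::FrontFace::eCounterClockwise );
//   commandBuffer.setDepthTestEnableEXT( VK_TRUE );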
//=== VK_KHR_deferred_host_operations ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeferredOperationKHR>::Type
Device::createDeferredOperationKHR( VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateDeferredOperationKHR(
static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkDeferredOperationKHR *>( &deferredOperation ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
return VULKAN_HPP_UNEXPECTED( result );
# else
VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createDeferredOperationKHR" );
# endif
}
return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeferredOperationKHR(
*this, *reinterpret_cast<VkDeferredOperationKHR *>( &deferredOperation ), allocator );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint32_t DeferredOperationKHR::getMaxConcurrency() const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeferredOperationMaxConcurrencyKHR &&
"Function <vkGetDeferredOperationMaxConcurrencyKHR> requires <VK_KHR_deferred_host_operations>" );
uint32_t result =
getDispatcher()->vkGetDeferredOperationMaxConcurrencyKHR( static_cast<VkDevice>( m_device ), static_cast<VkDeferredOperationKHR>( m_operation ) );
return result;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result DeferredOperationKHR::getResult() const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeferredOperationResultKHR &&
"Function <vkGetDeferredOperationResultKHR> requires <VK_KHR_deferred_host_operations>" );
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkGetDeferredOperationResultKHR( static_cast<VkDevice>( m_device ), static_cast<VkDeferredOperationKHR>( m_operation ) ) );
return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result DeferredOperationKHR::join() const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkDeferredOperationJoinKHR && "Function <vkDeferredOperationJoinKHR> requires <VK_KHR_deferred_host_operations>" );
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkDeferredOperationJoinKHR( static_cast<VkDevice>( m_device ), static_cast<VkDeferredOperationKHR>( m_operation ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck(
result,
VULKAN_HPP_NAMESPACE_STRING "::DeferredOperationKHR::join",
{ VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eThreadDoneKHR, VULKAN_HPP_NAMESPACE::Result::eThreadIdleKHR } );
return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
}
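// Usage sketch (illustrative only; assumes a vk::raii::Device named `device` with
// VK_KHR_deferred_host_operations enabled):
//   vk::raii::DeferredOperationKHR op = device.createDeferredOperationKHR();
//   // pass *op to a deferrable command, then let worker threads call join():
//   vk::Result r = op.join();           // eSuccess, eThreadDoneKHR or eThreadIdleKHR
//   if ( r == vk::Result::eSuccess )
//   {
//     vk::Result deferredResult = op.getResult();  // result of the deferred command itself
//   }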
//=== VK_KHR_pipeline_executable_properties ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR>
Device::getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR & pipelineInfo ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetPipelineExecutablePropertiesKHR &&
"Function <vkGetPipelineExecutablePropertiesKHR> requires <VK_KHR_pipeline_executable_properties>" );
std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR> properties;
uint32_t executableCount;
VULKAN_HPP_NAMESPACE::Result result;
do
{
result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetPipelineExecutablePropertiesKHR(
static_cast<VkDevice>( m_device ), reinterpret_cast<const VkPipelineInfoKHR *>( &pipelineInfo ), &executableCount, nullptr ) );
if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && executableCount )
{
properties.resize( executableCount );
result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkGetPipelineExecutablePropertiesKHR( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkPipelineInfoKHR *>( &pipelineInfo ),
&executableCount,
reinterpret_cast<VkPipelineExecutablePropertiesKHR *>( properties.data() ) ) );
}
} while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutablePropertiesKHR" );
VULKAN_HPP_ASSERT( executableCount <= properties.size() );
if ( executableCount < properties.size() )
{
properties.resize( executableCount );
}
return properties;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR>
Device::getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetPipelineExecutableStatisticsKHR &&
"Function <vkGetPipelineExecutableStatisticsKHR> requires <VK_KHR_pipeline_executable_properties>" );
std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR> statistics;
uint32_t statisticCount;
VULKAN_HPP_NAMESPACE::Result result;
do
{
result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetPipelineExecutableStatisticsKHR(
static_cast<VkDevice>( m_device ), reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ), &statisticCount, nullptr ) );
if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && statisticCount )
{
statistics.resize( statisticCount );
result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkGetPipelineExecutableStatisticsKHR( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ),
&statisticCount,
reinterpret_cast<VkPipelineExecutableStatisticKHR *>( statistics.data() ) ) );
}
} while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableStatisticsKHR" );
VULKAN_HPP_ASSERT( statisticCount <= statistics.size() );
if ( statisticCount < statistics.size() )
{
statistics.resize( statisticCount );
}
return statistics;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR>
Device::getPipelineExecutableInternalRepresentationsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetPipelineExecutableInternalRepresentationsKHR &&
"Function <vkGetPipelineExecutableInternalRepresentationsKHR> requires <VK_KHR_pipeline_executable_properties>" );
std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR> internalRepresentations;
uint32_t internalRepresentationCount;
VULKAN_HPP_NAMESPACE::Result result;
do
{
result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkGetPipelineExecutableInternalRepresentationsKHR( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ),
&internalRepresentationCount,
nullptr ) );
if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && internalRepresentationCount )
{
internalRepresentations.resize( internalRepresentationCount );
result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetPipelineExecutableInternalRepresentationsKHR(
static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ),
&internalRepresentationCount,
reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR *>( internalRepresentations.data() ) ) );
}
} while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableInternalRepresentationsKHR" );
VULKAN_HPP_ASSERT( internalRepresentationCount <= internalRepresentations.size() );
if ( internalRepresentationCount < internalRepresentations.size() )
{
internalRepresentations.resize( internalRepresentationCount );
}
return internalRepresentations;
}
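// Usage sketch (illustrative only; assumes `device` and `pipeline` are valid RAII objects and the
// device enabled the pipelineExecutableInfo feature of VK_KHR_pipeline_executable_properties):
//   vk::PipelineInfoKHR pipelineInfo( *pipeline );
//   auto executables = device.getPipelineExecutablePropertiesKHR( pipelineInfo );
//   for ( uint32_t i = 0; i < executables.size(); ++i )
//   {
//     vk::PipelineExecutableInfoKHR executableInfo( *pipeline, i );
//     auto statistics = device.getPipelineExecutableStatisticsKHR( executableInfo );
//   }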
//=== VK_EXT_host_image_copy ===
VULKAN_HPP_INLINE void Device::copyMemoryToImageEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfoEXT & copyMemoryToImageInfo ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCopyMemoryToImageEXT && "Function <vkCopyMemoryToImageEXT> requires <VK_EXT_host_image_copy>" );
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCopyMemoryToImageEXT(
static_cast<VkDevice>( m_device ), reinterpret_cast<const VkCopyMemoryToImageInfoEXT *>( &copyMemoryToImageInfo ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::copyMemoryToImageEXT" );
}
VULKAN_HPP_INLINE void Device::copyImageToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfoEXT & copyImageToMemoryInfo ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCopyImageToMemoryEXT && "Function <vkCopyImageToMemoryEXT> requires <VK_EXT_host_image_copy>" );
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCopyImageToMemoryEXT(
static_cast<VkDevice>( m_device ), reinterpret_cast<const VkCopyImageToMemoryInfoEXT *>( &copyImageToMemoryInfo ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::copyImageToMemoryEXT" );
}
VULKAN_HPP_INLINE void Device::copyImageToImageEXT( const VULKAN_HPP_NAMESPACE::CopyImageToImageInfoEXT & copyImageToImageInfo ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCopyImageToImageEXT && "Function <vkCopyImageToImageEXT> requires <VK_EXT_host_image_copy>" );
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCopyImageToImageEXT(
static_cast<VkDevice>( m_device ), reinterpret_cast<const VkCopyImageToImageInfoEXT *>( &copyImageToImageInfo ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::copyImageToImageEXT" );
}
VULKAN_HPP_INLINE void Device::transitionImageLayoutEXT(
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::HostImageLayoutTransitionInfoEXT> const & transitions ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkTransitionImageLayoutEXT && "Function <vkTransitionImageLayoutEXT> requires <VK_EXT_host_image_copy>" );
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkTransitionImageLayoutEXT(
static_cast<VkDevice>( m_device ), transitions.size(), reinterpret_cast<const VkHostImageLayoutTransitionInfoEXT *>( transitions.data() ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::transitionImageLayoutEXT" );
}
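// Usage sketch (illustrative only; assumes VK_EXT_host_image_copy is enabled, `image` is a
// vk::raii::Image created with vk::ImageUsageFlagBits::eHostTransferEXT, and `subresourceRange`
// is a suitable vk::ImageSubresourceRange):
//   vk::HostImageLayoutTransitionInfoEXT transition( *image,
//                                                    vk::ImageLayout::eUndefined,
//                                                    vk::ImageLayout::eGeneral,
//                                                    subresourceRange );
//   device.transitionImageLayoutEXT( transition );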
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR
Image::getSubresourceLayout2EXT( const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT(
getDispatcher()->vkGetImageSubresourceLayout2EXT &&
"Function <vkGetImageSubresourceLayout2EXT> requires <VK_EXT_host_image_copy> or <VK_EXT_image_compression_control> or <VK_KHR_maintenance5>" );
VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR layout;
getDispatcher()->vkGetImageSubresourceLayout2EXT( static_cast<VkDevice>( m_device ),
static_cast<VkImage>( m_image ),
reinterpret_cast<const VkImageSubresource2KHR *>( &subresource ),
reinterpret_cast<VkSubresourceLayout2KHR *>( &layout ) );
return layout;
}
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
Image::getSubresourceLayout2EXT( const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT(
getDispatcher()->vkGetImageSubresourceLayout2EXT &&
"Function <vkGetImageSubresourceLayout2EXT> requires <VK_EXT_host_image_copy> or <VK_EXT_image_compression_control> or <VK_KHR_maintenance5>" );
VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR & layout = structureChain.template get<VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR>();
getDispatcher()->vkGetImageSubresourceLayout2EXT( static_cast<VkDevice>( m_device ),
static_cast<VkImage>( m_image ),
reinterpret_cast<const VkImageSubresource2KHR *>( &subresource ),
reinterpret_cast<VkSubresourceLayout2KHR *>( &layout ) );
return structureChain;
}
//=== VK_KHR_map_memory2 ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE void * Device::mapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryMapInfoKHR & memoryMapInfo ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkMapMemory2KHR && "Function <vkMapMemory2KHR> requires <VK_KHR_map_memory2>" );
void * pData;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkMapMemory2KHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkMemoryMapInfoKHR *>( &memoryMapInfo ), &pData ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::mapMemory2KHR" );
return pData;
}
VULKAN_HPP_INLINE void Device::unmapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryUnmapInfoKHR & memoryUnmapInfo ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkUnmapMemory2KHR && "Function <vkUnmapMemory2KHR> requires <VK_KHR_map_memory2>" );
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkUnmapMemory2KHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkMemoryUnmapInfoKHR *>( &memoryUnmapInfo ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::unmapMemory2KHR" );
}
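// Usage sketch (illustrative only; assumes `memory` is a vk::raii::DeviceMemory allocated from a
// host-visible memory type and VK_KHR_map_memory2 is enabled):
//   vk::MemoryMapInfoKHR mapInfo( {}, *memory, 0, VK_WHOLE_SIZE );
//   void * ptr = device.mapMemory2KHR( mapInfo );
//   // ... read / write through ptr ...
//   device.unmapMemory2KHR( vk::MemoryUnmapInfoKHR( {}, *memory ) );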
//=== VK_EXT_swapchain_maintenance1 ===
VULKAN_HPP_INLINE void Device::releaseSwapchainImagesEXT( const VULKAN_HPP_NAMESPACE::ReleaseSwapchainImagesInfoEXT & releaseInfo ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkReleaseSwapchainImagesEXT && "Function <vkReleaseSwapchainImagesEXT> requires <VK_EXT_swapchain_maintenance1>" );
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkReleaseSwapchainImagesEXT(
static_cast<VkDevice>( m_device ), reinterpret_cast<const VkReleaseSwapchainImagesInfoEXT *>( &releaseInfo ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::releaseSwapchainImagesEXT" );
}
//=== VK_NV_device_generated_commands ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
Device::getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV & info ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetGeneratedCommandsMemoryRequirementsNV &&
"Function <vkGetGeneratedCommandsMemoryRequirementsNV> requires <VK_NV_device_generated_commands>" );
VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
getDispatcher()->vkGetGeneratedCommandsMemoryRequirementsNV( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoNV *>( &info ),
reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
return memoryRequirements;
}
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
Device::getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV & info ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetGeneratedCommandsMemoryRequirementsNV &&
"Function <vkGetGeneratedCommandsMemoryRequirementsNV> requires <VK_NV_device_generated_commands>" );
VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
getDispatcher()->vkGetGeneratedCommandsMemoryRequirementsNV( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoNV *>( &info ),
reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
return structureChain;
}
VULKAN_HPP_INLINE void
CommandBuffer::preprocessGeneratedCommandsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV & generatedCommandsInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdPreprocessGeneratedCommandsNV &&
"Function <vkCmdPreprocessGeneratedCommandsNV> requires <VK_NV_device_generated_commands>" );
getDispatcher()->vkCmdPreprocessGeneratedCommandsNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
reinterpret_cast<const VkGeneratedCommandsInfoNV *>( &generatedCommandsInfo ) );
}
VULKAN_HPP_INLINE void
CommandBuffer::executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed,
const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV & generatedCommandsInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdExecuteGeneratedCommandsNV &&
"Function <vkCmdExecuteGeneratedCommandsNV> requires <VK_NV_device_generated_commands>" );
getDispatcher()->vkCmdExecuteGeneratedCommandsNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkBool32>( isPreprocessed ),
reinterpret_cast<const VkGeneratedCommandsInfoNV *>( &generatedCommandsInfo ) );
}
VULKAN_HPP_INLINE void CommandBuffer::bindPipelineShaderGroupNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
VULKAN_HPP_NAMESPACE::Pipeline pipeline,
uint32_t groupIndex ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindPipelineShaderGroupNV &&
"Function <vkCmdBindPipelineShaderGroupNV> requires <VK_NV_device_generated_commands>" );
getDispatcher()->vkCmdBindPipelineShaderGroupNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkPipelineBindPoint>( pipelineBindPoint ),
static_cast<VkPipeline>( pipeline ),
groupIndex );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::IndirectCommandsLayoutNV>::Type
Device::createIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateIndirectCommandsLayoutNV(
static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNV *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkIndirectCommandsLayoutNV *>( &indirectCommandsLayout ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
return VULKAN_HPP_UNEXPECTED( result );
# else
VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createIndirectCommandsLayoutNV" );
# endif
}
return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::IndirectCommandsLayoutNV(
*this, *reinterpret_cast<VkIndirectCommandsLayoutNV *>( &indirectCommandsLayout ), allocator );
}
//=== VK_EXT_depth_bias_control ===
VULKAN_HPP_INLINE void CommandBuffer::setDepthBias2EXT( const VULKAN_HPP_NAMESPACE::DepthBiasInfoEXT & depthBiasInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthBias2EXT && "Function <vkCmdSetDepthBias2EXT> requires <VK_EXT_depth_bias_control>" );
getDispatcher()->vkCmdSetDepthBias2EXT( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkDepthBiasInfoEXT *>( &depthBiasInfo ) );
}
//=== VK_EXT_acquire_drm_display ===
VULKAN_HPP_INLINE void PhysicalDevice::acquireDrmDisplayEXT( int32_t drmFd, VULKAN_HPP_NAMESPACE::DisplayKHR display ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkAcquireDrmDisplayEXT && "Function <vkAcquireDrmDisplayEXT> requires <VK_EXT_acquire_drm_display>" );
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkAcquireDrmDisplayEXT( static_cast<VkPhysicalDevice>( m_physicalDevice ), drmFd, static_cast<VkDisplayKHR>( display ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireDrmDisplayEXT" );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayKHR>::Type
PhysicalDevice::getDrmDisplayEXT( int32_t drmFd, uint32_t connectorId ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::DisplayKHR display;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetDrmDisplayEXT(
static_cast<VkPhysicalDevice>( m_physicalDevice ), drmFd, connectorId, reinterpret_cast<VkDisplayKHR *>( &display ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
return VULKAN_HPP_UNEXPECTED( result );
# else
VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "PhysicalDevice::getDrmDisplayEXT" );
# endif
}
return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayKHR( *this, *reinterpret_cast<VkDisplayKHR *>( &display ) );
}
//=== VK_EXT_private_data ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PrivateDataSlot>::Type
Device::createPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreatePrivateDataSlotEXT(
static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkPrivateDataSlot *>( &privateDataSlot ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
return VULKAN_HPP_UNEXPECTED( result );
# else
VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createPrivateDataSlotEXT" );
# endif
}
return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PrivateDataSlot( *this, *reinterpret_cast<VkPrivateDataSlot *>( &privateDataSlot ), allocator );
}
VULKAN_HPP_INLINE void Device::destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkDestroyPrivateDataSlotEXT &&
"Function <vkDestroyPrivateDataSlotEXT> requires <VK_EXT_private_data> or <VK_VERSION_1_3>" );
getDispatcher()->vkDestroyPrivateDataSlotEXT(
static_cast<VkDevice>( m_device ),
static_cast<VkPrivateDataSlot>( privateDataSlot ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
VULKAN_HPP_INLINE void Device::setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_,
uint64_t objectHandle,
VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
uint64_t data ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkSetPrivateDataEXT && "Function <vkSetPrivateDataEXT> requires <VK_EXT_private_data> or <VK_VERSION_1_3>" );
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkSetPrivateDataEXT(
static_cast<VkDevice>( m_device ), static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), data ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setPrivateDataEXT" );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t Device::getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_,
uint64_t objectHandle,
VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetPrivateDataEXT && "Function <vkGetPrivateDataEXT> requires <VK_EXT_private_data> or <VK_VERSION_1_3>" );
uint64_t data;
getDispatcher()->vkGetPrivateDataEXT(
static_cast<VkDevice>( m_device ), static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), &data );
return data;
}
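// Usage sketch (illustrative only; assumes private data support via VK_EXT_private_data or
// Vulkan 1.3, and `objectHandle` is the handle of the object to tag, cast to uint64_t):
//   vk::raii::PrivateDataSlot slot = device.createPrivateDataSlotEXT( {} );
//   device.setPrivateDataEXT( vk::ObjectType::eBuffer, objectHandle, *slot, 42 );
//   uint64_t value = device.getPrivateDataEXT( vk::ObjectType::eBuffer, objectHandle, *slot );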
//=== VK_KHR_video_encode_queue ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelPropertiesKHR PhysicalDevice::getVideoEncodeQualityLevelPropertiesKHR(
const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeQualityLevelInfoKHR & qualityLevelInfo ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR &&
"Function <vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR> requires <VK_KHR_video_encode_queue>" );
VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelPropertiesKHR qualityLevelProperties;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR(
static_cast<VkPhysicalDevice>( m_physicalDevice ),
reinterpret_cast<const VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR *>( &qualityLevelInfo ),
reinterpret_cast<VkVideoEncodeQualityLevelPropertiesKHR *>( &qualityLevelProperties ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoEncodeQualityLevelPropertiesKHR" );
return qualityLevelProperties;
}
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> PhysicalDevice::getVideoEncodeQualityLevelPropertiesKHR(
const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeQualityLevelInfoKHR & qualityLevelInfo ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR &&
"Function <vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR> requires <VK_KHR_video_encode_queue>" );
StructureChain<X, Y, Z...> structureChain;
VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelPropertiesKHR & qualityLevelProperties =
structureChain.template get<VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelPropertiesKHR>();
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR(
static_cast<VkPhysicalDevice>( m_physicalDevice ),
reinterpret_cast<const VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR *>( &qualityLevelInfo ),
reinterpret_cast<VkVideoEncodeQualityLevelPropertiesKHR *>( &qualityLevelProperties ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoEncodeQualityLevelPropertiesKHR" );
return structureChain;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::pair<VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR, std::vector<uint8_t>>
Device::getEncodedVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR & videoSessionParametersInfo ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetEncodedVideoSessionParametersKHR &&
"Function <vkGetEncodedVideoSessionParametersKHR> requires <VK_KHR_video_encode_queue>" );
std::pair<VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR, std::vector<uint8_t>> data_;
VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR & feedbackInfo = data_.first;
std::vector<uint8_t> & data = data_.second;
size_t dataSize;
VULKAN_HPP_NAMESPACE::Result result;
do
{
result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetEncodedVideoSessionParametersKHR(
static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkVideoEncodeSessionParametersGetInfoKHR *>( &videoSessionParametersInfo ),
reinterpret_cast<VkVideoEncodeSessionParametersFeedbackInfoKHR *>( &feedbackInfo ),
&dataSize,
nullptr ) );
if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && dataSize )
{
data.resize( dataSize );
result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetEncodedVideoSessionParametersKHR(
static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkVideoEncodeSessionParametersGetInfoKHR *>( &videoSessionParametersInfo ),
reinterpret_cast<VkVideoEncodeSessionParametersFeedbackInfoKHR *>( &feedbackInfo ),
&dataSize,
reinterpret_cast<void *>( data.data() ) ) );
}
} while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getEncodedVideoSessionParametersKHR" );
return data_;
}
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::pair<VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>, std::vector<uint8_t>>
Device::getEncodedVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR & videoSessionParametersInfo ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetEncodedVideoSessionParametersKHR &&
"Function <vkGetEncodedVideoSessionParametersKHR> requires <VK_KHR_video_encode_queue>" );
std::pair<VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>, std::vector<uint8_t>> data_;
VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR & feedbackInfo =
data_.first.template get<VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR>();
std::vector<uint8_t> & data = data_.second;
size_t dataSize;
VULKAN_HPP_NAMESPACE::Result result;
do
{
result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetEncodedVideoSessionParametersKHR(
static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkVideoEncodeSessionParametersGetInfoKHR *>( &videoSessionParametersInfo ),
reinterpret_cast<VkVideoEncodeSessionParametersFeedbackInfoKHR *>( &feedbackInfo ),
&dataSize,
nullptr ) );
if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && dataSize )
{
data.resize( dataSize );
result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetEncodedVideoSessionParametersKHR(
static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkVideoEncodeSessionParametersGetInfoKHR *>( &videoSessionParametersInfo ),
reinterpret_cast<VkVideoEncodeSessionParametersFeedbackInfoKHR *>( &feedbackInfo ),
&dataSize,
reinterpret_cast<void *>( data.data() ) ) );
}
} while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getEncodedVideoSessionParametersKHR" );
return data_;
}
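// Usage sketch (illustrative only; assumes a vk::VideoSessionParametersKHR handle named
// `sessionParameters` belonging to a video encode session and VK_KHR_video_encode_queue enabled):
//   vk::VideoEncodeSessionParametersGetInfoKHR getInfo( sessionParameters );
//   auto [feedbackInfo, encodedData] = device.getEncodedVideoSessionParametersKHR( getInfo );
//   // encodedData holds the encoded parameter set bytes (e.g. H.264 SPS/PPS)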
VULKAN_HPP_INLINE void CommandBuffer::encodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR & encodeInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdEncodeVideoKHR && "Function <vkCmdEncodeVideoKHR> requires <VK_KHR_video_encode_queue>" );
getDispatcher()->vkCmdEncodeVideoKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkVideoEncodeInfoKHR *>( &encodeInfo ) );
}
# if defined( VK_ENABLE_BETA_EXTENSIONS )
//=== VK_NV_cuda_kernel_launch ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CudaModuleNV>::Type
Device::createCudaModuleNV( VULKAN_HPP_NAMESPACE::CudaModuleCreateInfoNV const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::CudaModuleNV module;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateCudaModuleNV(
static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkCudaModuleCreateInfoNV *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkCudaModuleNV *>( &module ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
return VULKAN_HPP_UNEXPECTED( result );
# else
VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createCudaModuleNV" );
# endif
}
return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CudaModuleNV( *this, *reinterpret_cast<VkCudaModuleNV *>( &module ), allocator );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<uint8_t> CudaModuleNV::getCache() const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetCudaModuleCacheNV && "Function <vkGetCudaModuleCacheNV> requires <VK_NV_cuda_kernel_launch>" );
std::vector<uint8_t> cacheData;
size_t cacheSize;
VULKAN_HPP_NAMESPACE::Result result;
do
{
result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkGetCudaModuleCacheNV( static_cast<VkDevice>( m_device ), static_cast<VkCudaModuleNV>( m_module ), &cacheSize, nullptr ) );
if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && cacheSize )
{
cacheData.resize( cacheSize );
result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetCudaModuleCacheNV(
static_cast<VkDevice>( m_device ), static_cast<VkCudaModuleNV>( m_module ), &cacheSize, reinterpret_cast<void *>( cacheData.data() ) ) );
}
} while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CudaModuleNV::getCache" );
VULKAN_HPP_ASSERT( cacheSize <= cacheData.size() );
if ( cacheSize < cacheData.size() )
{
cacheData.resize( cacheSize );
}
return cacheData;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CudaFunctionNV>::Type
Device::createCudaFunctionNV( VULKAN_HPP_NAMESPACE::CudaFunctionCreateInfoNV const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::CudaFunctionNV function;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateCudaFunctionNV(
static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkCudaFunctionCreateInfoNV *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkCudaFunctionNV *>( &function ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
return VULKAN_HPP_UNEXPECTED( result );
# else
VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createCudaFunctionNV" );
# endif
}
return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CudaFunctionNV( *this, *reinterpret_cast<VkCudaFunctionNV *>( &function ), allocator );
}
VULKAN_HPP_INLINE void CommandBuffer::cudaLaunchKernelNV( const VULKAN_HPP_NAMESPACE::CudaLaunchInfoNV & launchInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCudaLaunchKernelNV && "Function <vkCmdCudaLaunchKernelNV> requires <VK_NV_cuda_kernel_launch>" );
getDispatcher()->vkCmdCudaLaunchKernelNV( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCudaLaunchInfoNV *>( &launchInfo ) );
}
# endif /*VK_ENABLE_BETA_EXTENSIONS*/
# if defined( VK_USE_PLATFORM_METAL_EXT )
//=== VK_EXT_metal_objects ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT Device::exportMetalObjectsEXT() const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkExportMetalObjectsEXT && "Function <vkExportMetalObjectsEXT> requires <VK_EXT_metal_objects>" );
VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT metalObjectsInfo;
getDispatcher()->vkExportMetalObjectsEXT( static_cast<VkDevice>( m_device ), reinterpret_cast<VkExportMetalObjectsInfoEXT *>( &metalObjectsInfo ) );
return metalObjectsInfo;
}
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> Device::exportMetalObjectsEXT() const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkExportMetalObjectsEXT && "Function <vkExportMetalObjectsEXT> requires <VK_EXT_metal_objects>" );
VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT & metalObjectsInfo = structureChain.template get<VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT>();
getDispatcher()->vkExportMetalObjectsEXT( static_cast<VkDevice>( m_device ), reinterpret_cast<VkExportMetalObjectsInfoEXT *>( &metalObjectsInfo ) );
return structureChain;
}
# endif /*VK_USE_PLATFORM_METAL_EXT*/
//=== VK_KHR_synchronization2 ===
VULKAN_HPP_INLINE void CommandBuffer::setEvent2KHR( VULKAN_HPP_NAMESPACE::Event event,
const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetEvent2KHR && "Function <vkCmdSetEvent2KHR> requires <VK_KHR_synchronization2> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetEvent2KHR(
static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkEvent>( event ), reinterpret_cast<const VkDependencyInfo *>( &dependencyInfo ) );
}
VULKAN_HPP_INLINE void CommandBuffer::resetEvent2KHR( VULKAN_HPP_NAMESPACE::Event event,
VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdResetEvent2KHR && "Function <vkCmdResetEvent2KHR> requires <VK_KHR_synchronization2> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdResetEvent2KHR(
static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags2>( stageMask ) );
}
VULKAN_HPP_INLINE void
CommandBuffer::waitEvents2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DependencyInfo> const & dependencyInfos ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdWaitEvents2KHR && "Function <vkCmdWaitEvents2KHR> requires <VK_KHR_synchronization2> or <VK_VERSION_1_3>" );
# ifdef VULKAN_HPP_NO_EXCEPTIONS
VULKAN_HPP_ASSERT( events.size() == dependencyInfos.size() );
# else
if ( events.size() != dependencyInfos.size() )
{
throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::waitEvents2KHR: events.size() != dependencyInfos.size()" );
}
# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
getDispatcher()->vkCmdWaitEvents2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
events.size(),
reinterpret_cast<const VkEvent *>( events.data() ),
reinterpret_cast<const VkDependencyInfo *>( dependencyInfos.data() ) );
}
VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier2KHR( const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdPipelineBarrier2KHR &&
"Function <vkCmdPipelineBarrier2KHR> requires <VK_KHR_synchronization2> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdPipelineBarrier2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
reinterpret_cast<const VkDependencyInfo *>( &dependencyInfo ) );
}
VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp2KHR( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage,
VULKAN_HPP_NAMESPACE::QueryPool queryPool,
uint32_t query ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdWriteTimestamp2KHR &&
"Function <vkCmdWriteTimestamp2KHR> requires <VK_KHR_synchronization2> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdWriteTimestamp2KHR(
static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPipelineStageFlags2>( stage ), static_cast<VkQueryPool>( queryPool ), query );
}
VULKAN_HPP_INLINE void Queue::submit2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo2> const & submits,
VULKAN_HPP_NAMESPACE::Fence fence ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkQueueSubmit2KHR && "Function <vkQueueSubmit2KHR> requires <VK_KHR_synchronization2> or <VK_VERSION_1_3>" );
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkQueueSubmit2KHR(
static_cast<VkQueue>( m_queue ), submits.size(), reinterpret_cast<const VkSubmitInfo2 *>( submits.data() ), static_cast<VkFence>( fence ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::submit2KHR" );
}
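    // Usage sketch (not part of the generated API; all names assumed): submitting one command
    // buffer through Queue::submit2KHR above. "queue", "commandBuffer" and "fence" are raii
    // objects assumed to have been created by the application.
    //
    //   VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo commandBufferInfo{};
    //   commandBufferInfo.commandBuffer = *commandBuffer;
    //   VULKAN_HPP_NAMESPACE::SubmitInfo2 submitInfo{};
    //   submitInfo.commandBufferInfoCount = 1;
    //   submitInfo.pCommandBufferInfos    = &commandBufferInfo;
    //   queue.submit2KHR( submitInfo, *fence );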
//=== VK_EXT_descriptor_buffer ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceSize DescriptorSetLayout::getSizeEXT() const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetDescriptorSetLayoutSizeEXT && "Function <vkGetDescriptorSetLayoutSizeEXT> requires <VK_EXT_descriptor_buffer>" );
VULKAN_HPP_NAMESPACE::DeviceSize layoutSizeInBytes;
getDispatcher()->vkGetDescriptorSetLayoutSizeEXT( static_cast<VkDevice>( m_device ),
static_cast<VkDescriptorSetLayout>( m_descriptorSetLayout ),
reinterpret_cast<VkDeviceSize *>( &layoutSizeInBytes ) );
return layoutSizeInBytes;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceSize
DescriptorSetLayout::getBindingOffsetEXT( uint32_t binding ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetDescriptorSetLayoutBindingOffsetEXT &&
"Function <vkGetDescriptorSetLayoutBindingOffsetEXT> requires <VK_EXT_descriptor_buffer>" );
VULKAN_HPP_NAMESPACE::DeviceSize offset;
getDispatcher()->vkGetDescriptorSetLayoutBindingOffsetEXT(
static_cast<VkDevice>( m_device ), static_cast<VkDescriptorSetLayout>( m_descriptorSetLayout ), binding, reinterpret_cast<VkDeviceSize *>( &offset ) );
return offset;
}
VULKAN_HPP_INLINE void Device::getDescriptorEXT( const VULKAN_HPP_NAMESPACE::DescriptorGetInfoEXT & descriptorInfo,
size_t dataSize,
void * pDescriptor ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetDescriptorEXT && "Function <vkGetDescriptorEXT> requires <VK_EXT_descriptor_buffer>" );
getDispatcher()->vkGetDescriptorEXT(
static_cast<VkDevice>( m_device ), reinterpret_cast<const VkDescriptorGetInfoEXT *>( &descriptorInfo ), dataSize, pDescriptor );
}
template <typename DescriptorType>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE DescriptorType
Device::getDescriptorEXT( const VULKAN_HPP_NAMESPACE::DescriptorGetInfoEXT & descriptorInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetDescriptorEXT && "Function <vkGetDescriptorEXT> requires <VK_EXT_descriptor_buffer>" );
DescriptorType descriptor;
getDispatcher()->vkGetDescriptorEXT( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkDescriptorGetInfoEXT *>( &descriptorInfo ),
sizeof( DescriptorType ),
reinterpret_cast<void *>( &descriptor ) );
return descriptor;
}
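    // Usage sketch (not part of the generated API; all names assumed): with VK_EXT_descriptor_buffer,
    // query the layout size and a binding offset, then fetch a uniform-buffer descriptor into host
    // memory via the templated Device::getDescriptorEXT overload above. "device", "descriptorSetLayout"
    // and "addressInfo" (a DescriptorAddressInfoEXT) are assumed to exist; "descriptorSize" would come
    // from PhysicalDeviceDescriptorBufferPropertiesEXT.
    //
    //   VULKAN_HPP_NAMESPACE::DeviceSize layoutSize    = descriptorSetLayout.getSizeEXT();
    //   VULKAN_HPP_NAMESPACE::DeviceSize bindingOffset = descriptorSetLayout.getBindingOffsetEXT( 0 );
    //   VULKAN_HPP_NAMESPACE::DescriptorGetInfoEXT getInfo{};
    //   getInfo.type                = VULKAN_HPP_NAMESPACE::DescriptorType::eUniformBuffer;
    //   getInfo.data.pUniformBuffer = &addressInfo;
    //   std::vector<std::byte> descriptor( descriptorSize );
    //   device.getDescriptorEXT( getInfo, descriptor.size(), descriptor.data() );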
VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorBuffersEXT(
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorBufferBindingInfoEXT> const & bindingInfos ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindDescriptorBuffersEXT && "Function <vkCmdBindDescriptorBuffersEXT> requires <VK_EXT_descriptor_buffer>" );
getDispatcher()->vkCmdBindDescriptorBuffersEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
bindingInfos.size(),
reinterpret_cast<const VkDescriptorBufferBindingInfoEXT *>( bindingInfos.data() ) );
}
VULKAN_HPP_INLINE void
CommandBuffer::setDescriptorBufferOffsetsEXT( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
VULKAN_HPP_NAMESPACE::PipelineLayout layout,
uint32_t firstSet,
VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t> const & bufferIndices,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDescriptorBufferOffsetsEXT &&
"Function <vkCmdSetDescriptorBufferOffsetsEXT> requires <VK_EXT_descriptor_buffer>" );
# ifdef VULKAN_HPP_NO_EXCEPTIONS
VULKAN_HPP_ASSERT( bufferIndices.size() == offsets.size() );
# else
if ( bufferIndices.size() != offsets.size() )
{
throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setDescriptorBufferOffsetsEXT: bufferIndices.size() != offsets.size()" );
}
# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
getDispatcher()->vkCmdSetDescriptorBufferOffsetsEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkPipelineBindPoint>( pipelineBindPoint ),
static_cast<VkPipelineLayout>( layout ),
firstSet,
bufferIndices.size(),
bufferIndices.data(),
reinterpret_cast<const VkDeviceSize *>( offsets.data() ) );
}
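    // Usage sketch (not part of the generated API; all names assumed): bind a descriptor buffer
    // and point set 0 of "pipelineLayout" at offset 0 inside it. bufferIndices and offsets are
    // passed element-wise and must have matching sizes, as checked above.
    //
    //   VULKAN_HPP_NAMESPACE::DescriptorBufferBindingInfoEXT bindingInfo{};
    //   bindingInfo.address = descriptorBufferDeviceAddress;
    //   bindingInfo.usage   = VULKAN_HPP_NAMESPACE::BufferUsageFlagBits::eResourceDescriptorBufferEXT;
    //   commandBuffer.bindDescriptorBuffersEXT( bindingInfo );
    //   uint32_t                         bufferIndex = 0;
    //   VULKAN_HPP_NAMESPACE::DeviceSize offset      = 0;
    //   commandBuffer.setDescriptorBufferOffsetsEXT(
    //     VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, pipelineLayout, 0, bufferIndex, offset );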
VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorBufferEmbeddedSamplersEXT( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
VULKAN_HPP_NAMESPACE::PipelineLayout layout,
uint32_t set ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindDescriptorBufferEmbeddedSamplersEXT &&
"Function <vkCmdBindDescriptorBufferEmbeddedSamplersEXT> requires <VK_EXT_descriptor_buffer>" );
getDispatcher()->vkCmdBindDescriptorBufferEmbeddedSamplersEXT(
static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipelineLayout>( layout ), set );
}
template <typename DataType>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE DataType
Device::getBufferOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::BufferCaptureDescriptorDataInfoEXT & info ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetBufferOpaqueCaptureDescriptorDataEXT &&
"Function <vkGetBufferOpaqueCaptureDescriptorDataEXT> requires <VK_EXT_descriptor_buffer>" );
DataType data;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetBufferOpaqueCaptureDescriptorDataEXT(
static_cast<VkDevice>( m_device ), reinterpret_cast<const VkBufferCaptureDescriptorDataInfoEXT *>( &info ), &data ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getBufferOpaqueCaptureDescriptorDataEXT" );
return data;
}
template <typename DataType>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE DataType
Device::getImageOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::ImageCaptureDescriptorDataInfoEXT & info ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageOpaqueCaptureDescriptorDataEXT &&
"Function <vkGetImageOpaqueCaptureDescriptorDataEXT> requires <VK_EXT_descriptor_buffer>" );
DataType data;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetImageOpaqueCaptureDescriptorDataEXT(
static_cast<VkDevice>( m_device ), reinterpret_cast<const VkImageCaptureDescriptorDataInfoEXT *>( &info ), &data ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getImageOpaqueCaptureDescriptorDataEXT" );
return data;
}
template <typename DataType>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE DataType
Device::getImageViewOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::ImageViewCaptureDescriptorDataInfoEXT & info ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageViewOpaqueCaptureDescriptorDataEXT &&
"Function <vkGetImageViewOpaqueCaptureDescriptorDataEXT> requires <VK_EXT_descriptor_buffer>" );
DataType data;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetImageViewOpaqueCaptureDescriptorDataEXT(
static_cast<VkDevice>( m_device ), reinterpret_cast<const VkImageViewCaptureDescriptorDataInfoEXT *>( &info ), &data ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getImageViewOpaqueCaptureDescriptorDataEXT" );
return data;
}
template <typename DataType>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE DataType
Device::getSamplerOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::SamplerCaptureDescriptorDataInfoEXT & info ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetSamplerOpaqueCaptureDescriptorDataEXT &&
"Function <vkGetSamplerOpaqueCaptureDescriptorDataEXT> requires <VK_EXT_descriptor_buffer>" );
DataType data;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetSamplerOpaqueCaptureDescriptorDataEXT(
static_cast<VkDevice>( m_device ), reinterpret_cast<const VkSamplerCaptureDescriptorDataInfoEXT *>( &info ), &data ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSamplerOpaqueCaptureDescriptorDataEXT" );
return data;
}
template <typename DataType>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE DataType Device::getAccelerationStructureOpaqueCaptureDescriptorDataEXT(
const VULKAN_HPP_NAMESPACE::AccelerationStructureCaptureDescriptorDataInfoEXT & info ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT &&
"Function <vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT> requires <VK_EXT_descriptor_buffer>" );
DataType data;
VULKAN_HPP_NAMESPACE::Result result =
static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT(
static_cast<VkDevice>( m_device ), reinterpret_cast<const VkAccelerationStructureCaptureDescriptorDataInfoEXT *>( &info ), &data ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureOpaqueCaptureDescriptorDataEXT" );
return data;
}
//=== VK_NV_fragment_shading_rate_enums ===
VULKAN_HPP_INLINE void
CommandBuffer::setFragmentShadingRateEnumNV( VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate,
const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2] ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetFragmentShadingRateEnumNV &&
"Function <vkCmdSetFragmentShadingRateEnumNV> requires <VK_NV_fragment_shading_rate_enums>" );
getDispatcher()->vkCmdSetFragmentShadingRateEnumNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkFragmentShadingRateNV>( shadingRate ),
reinterpret_cast<const VkFragmentShadingRateCombinerOpKHR *>( combinerOps ) );
}
//=== VK_EXT_mesh_shader ===
VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksEXT( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawMeshTasksEXT && "Function <vkCmdDrawMeshTasksEXT> requires <VK_EXT_mesh_shader>" );
getDispatcher()->vkCmdDrawMeshTasksEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), groupCountX, groupCountY, groupCountZ );
}
VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectEXT( VULKAN_HPP_NAMESPACE::Buffer buffer,
VULKAN_HPP_NAMESPACE::DeviceSize offset,
uint32_t drawCount,
uint32_t stride ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawMeshTasksIndirectEXT && "Function <vkCmdDrawMeshTasksIndirectEXT> requires <VK_EXT_mesh_shader>" );
getDispatcher()->vkCmdDrawMeshTasksIndirectEXT(
static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
}
VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectCountEXT( VULKAN_HPP_NAMESPACE::Buffer buffer,
VULKAN_HPP_NAMESPACE::DeviceSize offset,
VULKAN_HPP_NAMESPACE::Buffer countBuffer,
VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
uint32_t maxDrawCount,
uint32_t stride ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawMeshTasksIndirectCountEXT && "Function <vkCmdDrawMeshTasksIndirectCountEXT> requires <VK_EXT_mesh_shader>" );
getDispatcher()->vkCmdDrawMeshTasksIndirectCountEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkBuffer>( buffer ),
static_cast<VkDeviceSize>( offset ),
static_cast<VkBuffer>( countBuffer ),
static_cast<VkDeviceSize>( countBufferOffset ),
maxDrawCount,
stride );
}
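    // Usage sketch (not part of the generated API; all names assumed): dispatching mesh-shader
    // work groups directly and via an indirect buffer holding VkDrawMeshTasksIndirectCommandEXT
    // records, using the wrappers above.
    //
    //   commandBuffer.drawMeshTasksEXT( 64, 1, 1 );
    //   commandBuffer.drawMeshTasksIndirectEXT( *indirectBuffer, 0, 1, sizeof( VkDrawMeshTasksIndirectCommandEXT ) );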
//=== VK_KHR_copy_commands2 ===
VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 & copyBufferInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyBuffer2KHR && "Function <vkCmdCopyBuffer2KHR> requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdCopyBuffer2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCopyBufferInfo2 *>( &copyBufferInfo ) );
}
VULKAN_HPP_INLINE void CommandBuffer::copyImage2KHR( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 & copyImageInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyImage2KHR && "Function <vkCmdCopyImage2KHR> requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdCopyImage2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCopyImageInfo2 *>( &copyImageInfo ) );
}
VULKAN_HPP_INLINE void
CommandBuffer::copyBufferToImage2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 & copyBufferToImageInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyBufferToImage2KHR &&
"Function <vkCmdCopyBufferToImage2KHR> requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdCopyBufferToImage2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
reinterpret_cast<const VkCopyBufferToImageInfo2 *>( &copyBufferToImageInfo ) );
}
VULKAN_HPP_INLINE void
CommandBuffer::copyImageToBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 & copyImageToBufferInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyImageToBuffer2KHR &&
"Function <vkCmdCopyImageToBuffer2KHR> requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdCopyImageToBuffer2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
reinterpret_cast<const VkCopyImageToBufferInfo2 *>( &copyImageToBufferInfo ) );
}
VULKAN_HPP_INLINE void CommandBuffer::blitImage2KHR( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 & blitImageInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBlitImage2KHR && "Function <vkCmdBlitImage2KHR> requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdBlitImage2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkBlitImageInfo2 *>( &blitImageInfo ) );
}
VULKAN_HPP_INLINE void CommandBuffer::resolveImage2KHR( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 & resolveImageInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdResolveImage2KHR && "Function <vkCmdResolveImage2KHR> requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdResolveImage2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
reinterpret_cast<const VkResolveImageInfo2 *>( &resolveImageInfo ) );
}
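    // Usage sketch (not part of the generated API; all names assumed): recording a 256-byte
    // buffer-to-buffer copy through the copy_commands2 entry point wrapped above.
    //
    //   VULKAN_HPP_NAMESPACE::BufferCopy2 region{};
    //   region.srcOffset = 0;
    //   region.dstOffset = 0;
    //   region.size      = 256;
    //   VULKAN_HPP_NAMESPACE::CopyBufferInfo2 copyInfo( *srcBuffer, *dstBuffer, region );
    //   commandBuffer.copyBuffer2KHR( copyInfo );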
//=== VK_EXT_device_fault ===
    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result
      Device::getFaultInfoEXT( VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT * pFaultCounts,
                               VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT *   pFaultInfo ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceFaultInfoEXT && "Function <vkGetDeviceFaultInfoEXT> requires <VK_EXT_device_fault>" );
      return static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetDeviceFaultInfoEXT( static_cast<VkDevice>( m_device ),
                                                                                                  reinterpret_cast<VkDeviceFaultCountsEXT *>( pFaultCounts ),
                                                                                                  reinterpret_cast<VkDeviceFaultInfoEXT *>( pFaultInfo ) ) );
    }
# if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_NV_acquire_winrt_display ===
VULKAN_HPP_INLINE void DisplayKHR::acquireWinrtNV() const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkAcquireWinrtDisplayNV && "Function <vkAcquireWinrtDisplayNV> requires <VK_NV_acquire_winrt_display>" );
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkAcquireWinrtDisplayNV( static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkDisplayKHR>( m_display ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::DisplayKHR::acquireWinrtNV" );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayKHR>::Type
PhysicalDevice::getWinrtDisplayNV( uint32_t deviceRelativeId ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::DisplayKHR display;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetWinrtDisplayNV(
static_cast<VkPhysicalDevice>( m_physicalDevice ), deviceRelativeId, reinterpret_cast<VkDisplayKHR *>( &display ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
return VULKAN_HPP_UNEXPECTED( result );
# else
VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "PhysicalDevice::getWinrtDisplayNV" );
# endif
}
return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayKHR( *this, *reinterpret_cast<VkDisplayKHR *>( &display ) );
}
# endif /*VK_USE_PLATFORM_WIN32_KHR*/
# if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
//=== VK_EXT_directfb_surface ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR>::Type
Instance::createDirectFBSurfaceEXT( VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateDirectFBSurfaceEXT(
static_cast<VkInstance>( m_instance ),
reinterpret_cast<const VkDirectFBSurfaceCreateInfoEXT *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
return VULKAN_HPP_UNEXPECTED( result );
# else
VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Instance::createDirectFBSurfaceEXT" );
# endif
}
return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, *reinterpret_cast<VkSurfaceKHR *>( &surface ), allocator );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32
PhysicalDevice::getDirectFBPresentationSupportEXT( uint32_t queueFamilyIndex, IDirectFB & dfb ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceDirectFBPresentationSupportEXT &&
"Function <vkGetPhysicalDeviceDirectFBPresentationSupportEXT> requires <VK_EXT_directfb_surface>" );
VkBool32 result =
getDispatcher()->vkGetPhysicalDeviceDirectFBPresentationSupportEXT( static_cast<VkPhysicalDevice>( m_physicalDevice ), queueFamilyIndex, &dfb );
return static_cast<VULKAN_HPP_NAMESPACE::Bool32>( result );
}
# endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
//=== VK_EXT_vertex_input_dynamic_state ===
VULKAN_HPP_INLINE void CommandBuffer::setVertexInputEXT(
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT> const & vertexBindingDescriptions,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT> const & vertexAttributeDescriptions ) const
VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetVertexInputEXT &&
"Function <vkCmdSetVertexInputEXT> requires <VK_EXT_shader_object> or <VK_EXT_vertex_input_dynamic_state>" );
getDispatcher()->vkCmdSetVertexInputEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
vertexBindingDescriptions.size(),
reinterpret_cast<const VkVertexInputBindingDescription2EXT *>( vertexBindingDescriptions.data() ),
vertexAttributeDescriptions.size(),
reinterpret_cast<const VkVertexInputAttributeDescription2EXT *>( vertexAttributeDescriptions.data() ) );
}
# if defined( VK_USE_PLATFORM_FUCHSIA )
//=== VK_FUCHSIA_external_memory ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE zx_handle_t
Device::getMemoryZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA & getZirconHandleInfo ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetMemoryZirconHandleFUCHSIA && "Function <vkGetMemoryZirconHandleFUCHSIA> requires <VK_FUCHSIA_external_memory>" );
zx_handle_t zirconHandle;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetMemoryZirconHandleFUCHSIA(
static_cast<VkDevice>( m_device ), reinterpret_cast<const VkMemoryGetZirconHandleInfoFUCHSIA *>( &getZirconHandleInfo ), &zirconHandle ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryZirconHandleFUCHSIA" );
return zirconHandle;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA
Device::getMemoryZirconHandlePropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, zx_handle_t zirconHandle ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetMemoryZirconHandlePropertiesFUCHSIA &&
"Function <vkGetMemoryZirconHandlePropertiesFUCHSIA> requires <VK_FUCHSIA_external_memory>" );
VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA memoryZirconHandleProperties;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetMemoryZirconHandlePropertiesFUCHSIA(
static_cast<VkDevice>( m_device ),
static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
zirconHandle,
reinterpret_cast<VkMemoryZirconHandlePropertiesFUCHSIA *>( &memoryZirconHandleProperties ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryZirconHandlePropertiesFUCHSIA" );
return memoryZirconHandleProperties;
}
# endif /*VK_USE_PLATFORM_FUCHSIA*/
# if defined( VK_USE_PLATFORM_FUCHSIA )
//=== VK_FUCHSIA_external_semaphore ===
VULKAN_HPP_INLINE void
Device::importSemaphoreZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA & importSemaphoreZirconHandleInfo ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkImportSemaphoreZirconHandleFUCHSIA &&
"Function <vkImportSemaphoreZirconHandleFUCHSIA> requires <VK_FUCHSIA_external_semaphore>" );
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkImportSemaphoreZirconHandleFUCHSIA(
static_cast<VkDevice>( m_device ), reinterpret_cast<const VkImportSemaphoreZirconHandleInfoFUCHSIA *>( &importSemaphoreZirconHandleInfo ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreZirconHandleFUCHSIA" );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE zx_handle_t
Device::getSemaphoreZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA & getZirconHandleInfo ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetSemaphoreZirconHandleFUCHSIA &&
"Function <vkGetSemaphoreZirconHandleFUCHSIA> requires <VK_FUCHSIA_external_semaphore>" );
zx_handle_t zirconHandle;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetSemaphoreZirconHandleFUCHSIA(
static_cast<VkDevice>( m_device ), reinterpret_cast<const VkSemaphoreGetZirconHandleInfoFUCHSIA *>( &getZirconHandleInfo ), &zirconHandle ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreZirconHandleFUCHSIA" );
return zirconHandle;
}
# endif /*VK_USE_PLATFORM_FUCHSIA*/
# if defined( VK_USE_PLATFORM_FUCHSIA )
//=== VK_FUCHSIA_buffer_collection ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::BufferCollectionFUCHSIA>::Type
Device::createBufferCollectionFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateBufferCollectionFUCHSIA(
static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkBufferCollectionCreateInfoFUCHSIA *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkBufferCollectionFUCHSIA *>( &collection ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
return VULKAN_HPP_UNEXPECTED( result );
# else
VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createBufferCollectionFUCHSIA" );
# endif
}
return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::BufferCollectionFUCHSIA(
*this, *reinterpret_cast<VkBufferCollectionFUCHSIA *>( &collection ), allocator );
}
VULKAN_HPP_INLINE void BufferCollectionFUCHSIA::setImageConstraints( const VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA & imageConstraintsInfo ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkSetBufferCollectionImageConstraintsFUCHSIA &&
"Function <vkSetBufferCollectionImageConstraintsFUCHSIA> requires <VK_FUCHSIA_buffer_collection>" );
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkSetBufferCollectionImageConstraintsFUCHSIA( static_cast<VkDevice>( m_device ),
static_cast<VkBufferCollectionFUCHSIA>( m_collection ),
reinterpret_cast<const VkImageConstraintsInfoFUCHSIA *>( &imageConstraintsInfo ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::BufferCollectionFUCHSIA::setImageConstraints" );
}
VULKAN_HPP_INLINE void
BufferCollectionFUCHSIA::setBufferConstraints( const VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA & bufferConstraintsInfo ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkSetBufferCollectionBufferConstraintsFUCHSIA &&
"Function <vkSetBufferCollectionBufferConstraintsFUCHSIA> requires <VK_FUCHSIA_buffer_collection>" );
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkSetBufferCollectionBufferConstraintsFUCHSIA( static_cast<VkDevice>( m_device ),
static_cast<VkBufferCollectionFUCHSIA>( m_collection ),
reinterpret_cast<const VkBufferConstraintsInfoFUCHSIA *>( &bufferConstraintsInfo ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::BufferCollectionFUCHSIA::setBufferConstraints" );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA BufferCollectionFUCHSIA::getProperties() const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetBufferCollectionPropertiesFUCHSIA &&
"Function <vkGetBufferCollectionPropertiesFUCHSIA> requires <VK_FUCHSIA_buffer_collection>" );
VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA properties;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkGetBufferCollectionPropertiesFUCHSIA( static_cast<VkDevice>( m_device ),
static_cast<VkBufferCollectionFUCHSIA>( m_collection ),
reinterpret_cast<VkBufferCollectionPropertiesFUCHSIA *>( &properties ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::BufferCollectionFUCHSIA::getProperties" );
return properties;
}
# endif /*VK_USE_PLATFORM_FUCHSIA*/
//=== VK_HUAWEI_subpass_shading ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Extent2D RenderPass::getSubpassShadingMaxWorkgroupSizeHUAWEI() const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI &&
"Function <vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI> requires <VK_HUAWEI_subpass_shading>" );
VULKAN_HPP_NAMESPACE::Extent2D maxWorkgroupSize;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI(
static_cast<VkDevice>( m_device ), static_cast<VkRenderPass>( m_renderPass ), reinterpret_cast<VkExtent2D *>( &maxWorkgroupSize ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::RenderPass::getSubpassShadingMaxWorkgroupSizeHUAWEI" );
return maxWorkgroupSize;
}
VULKAN_HPP_INLINE void CommandBuffer::subpassShadingHUAWEI() const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSubpassShadingHUAWEI && "Function <vkCmdSubpassShadingHUAWEI> requires <VK_HUAWEI_subpass_shading>" );
getDispatcher()->vkCmdSubpassShadingHUAWEI( static_cast<VkCommandBuffer>( m_commandBuffer ) );
}
//=== VK_HUAWEI_invocation_mask ===
VULKAN_HPP_INLINE void CommandBuffer::bindInvocationMaskHUAWEI( VULKAN_HPP_NAMESPACE::ImageView imageView,
VULKAN_HPP_NAMESPACE::ImageLayout imageLayout ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindInvocationMaskHUAWEI && "Function <vkCmdBindInvocationMaskHUAWEI> requires <VK_HUAWEI_invocation_mask>" );
getDispatcher()->vkCmdBindInvocationMaskHUAWEI(
static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkImageView>( imageView ), static_cast<VkImageLayout>( imageLayout ) );
}
//=== VK_NV_external_memory_rdma ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::RemoteAddressNV
Device::getMemoryRemoteAddressNV( const VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV & memoryGetRemoteAddressInfo ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetMemoryRemoteAddressNV && "Function <vkGetMemoryRemoteAddressNV> requires <VK_NV_external_memory_rdma>" );
VULKAN_HPP_NAMESPACE::RemoteAddressNV address;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkGetMemoryRemoteAddressNV( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkMemoryGetRemoteAddressInfoNV *>( &memoryGetRemoteAddressInfo ),
reinterpret_cast<VkRemoteAddressNV *>( &address ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryRemoteAddressNV" );
return address;
}
//=== VK_EXT_pipeline_properties ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::BaseOutStructure
Device::getPipelinePropertiesEXT( const VULKAN_HPP_NAMESPACE::PipelineInfoEXT & pipelineInfo ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetPipelinePropertiesEXT && "Function <vkGetPipelinePropertiesEXT> requires <VK_EXT_pipeline_properties>" );
VULKAN_HPP_NAMESPACE::BaseOutStructure pipelineProperties;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkGetPipelinePropertiesEXT( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkPipelineInfoEXT *>( &pipelineInfo ),
reinterpret_cast<VkBaseOutStructure *>( &pipelineProperties ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelinePropertiesEXT" );
return pipelineProperties;
}
//=== VK_EXT_extended_dynamic_state2 ===
VULKAN_HPP_INLINE void CommandBuffer::setPatchControlPointsEXT( uint32_t patchControlPoints ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetPatchControlPointsEXT &&
"Function <vkCmdSetPatchControlPointsEXT> requires <VK_EXT_extended_dynamic_state2> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetPatchControlPointsEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), patchControlPoints );
}
VULKAN_HPP_INLINE void CommandBuffer::setRasterizerDiscardEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT(
getDispatcher()->vkCmdSetRasterizerDiscardEnableEXT &&
"Function <vkCmdSetRasterizerDiscardEnableEXT> requires <VK_EXT_extended_dynamic_state2> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetRasterizerDiscardEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( rasterizerDiscardEnable ) );
}
VULKAN_HPP_INLINE void CommandBuffer::setDepthBiasEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthBiasEnableEXT &&
"Function <vkCmdSetDepthBiasEnableEXT> requires <VK_EXT_extended_dynamic_state2> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetDepthBiasEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthBiasEnable ) );
}
VULKAN_HPP_INLINE void CommandBuffer::setLogicOpEXT( VULKAN_HPP_NAMESPACE::LogicOp logicOp ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetLogicOpEXT &&
"Function <vkCmdSetLogicOpEXT> requires <VK_EXT_extended_dynamic_state2> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetLogicOpEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkLogicOp>( logicOp ) );
}
VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveRestartEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT(
getDispatcher()->vkCmdSetPrimitiveRestartEnableEXT &&
"Function <vkCmdSetPrimitiveRestartEnableEXT> requires <VK_EXT_extended_dynamic_state2> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetPrimitiveRestartEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( primitiveRestartEnable ) );
}
# if defined( VK_USE_PLATFORM_SCREEN_QNX )
//=== VK_QNX_screen_surface ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR>::Type
Instance::createScreenSurfaceQNX( VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateScreenSurfaceQNX(
static_cast<VkInstance>( m_instance ),
reinterpret_cast<const VkScreenSurfaceCreateInfoQNX *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
return VULKAN_HPP_UNEXPECTED( result );
# else
VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Instance::createScreenSurfaceQNX" );
# endif
}
return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, *reinterpret_cast<VkSurfaceKHR *>( &surface ), allocator );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32
PhysicalDevice::getScreenPresentationSupportQNX( uint32_t queueFamilyIndex, struct _screen_window & window ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceScreenPresentationSupportQNX &&
"Function <vkGetPhysicalDeviceScreenPresentationSupportQNX> requires <VK_QNX_screen_surface>" );
VkBool32 result =
getDispatcher()->vkGetPhysicalDeviceScreenPresentationSupportQNX( static_cast<VkPhysicalDevice>( m_physicalDevice ), queueFamilyIndex, &window );
return static_cast<VULKAN_HPP_NAMESPACE::Bool32>( result );
}
# endif /*VK_USE_PLATFORM_SCREEN_QNX*/
//=== VK_EXT_color_write_enable ===
VULKAN_HPP_INLINE void CommandBuffer::setColorWriteEnableEXT(
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Bool32> const & colorWriteEnables ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetColorWriteEnableEXT && "Function <vkCmdSetColorWriteEnableEXT> requires <VK_EXT_color_write_enable>" );
getDispatcher()->vkCmdSetColorWriteEnableEXT(
static_cast<VkCommandBuffer>( m_commandBuffer ), colorWriteEnables.size(), reinterpret_cast<const VkBool32 *>( colorWriteEnables.data() ) );
}
//=== VK_KHR_ray_tracing_maintenance1 ===
VULKAN_HPP_INLINE void CommandBuffer::traceRaysIndirect2KHR( VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdTraceRaysIndirect2KHR && "Function <vkCmdTraceRaysIndirect2KHR> requires <VK_KHR_ray_tracing_maintenance1>" );
getDispatcher()->vkCmdTraceRaysIndirect2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkDeviceAddress>( indirectDeviceAddress ) );
}
//=== VK_EXT_multi_draw ===
VULKAN_HPP_INLINE void
CommandBuffer::drawMultiEXT( VULKAN_HPP_NAMESPACE::StridedArrayProxy<const VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT> const & vertexInfo,
uint32_t instanceCount,
uint32_t firstInstance ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawMultiEXT && "Function <vkCmdDrawMultiEXT> requires <VK_EXT_multi_draw>" );
getDispatcher()->vkCmdDrawMultiEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
vertexInfo.size(),
reinterpret_cast<const VkMultiDrawInfoEXT *>( vertexInfo.data() ),
instanceCount,
firstInstance,
vertexInfo.stride() );
}
VULKAN_HPP_INLINE void
CommandBuffer::drawMultiIndexedEXT( VULKAN_HPP_NAMESPACE::StridedArrayProxy<const VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT> const & indexInfo,
uint32_t instanceCount,
uint32_t firstInstance,
Optional<const int32_t> vertexOffset ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawMultiIndexedEXT && "Function <vkCmdDrawMultiIndexedEXT> requires <VK_EXT_multi_draw>" );
getDispatcher()->vkCmdDrawMultiIndexedEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
indexInfo.size(),
reinterpret_cast<const VkMultiDrawIndexedInfoEXT *>( indexInfo.data() ),
instanceCount,
firstInstance,
indexInfo.stride(),
static_cast<const int32_t *>( vertexOffset ) );
}
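    // Usage sketch (not part of the generated API; all names assumed): two non-indexed draws
    // issued from a single vkCmdDrawMultiEXT call; the StridedArrayProxy supplies the per-element
    // stride that the wrapper above forwards verbatim.
    //
    //   std::array<VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT, 2> draws = { { { 0, 3 }, { 3, 3 } } };
    //   commandBuffer.drawMultiEXT( draws, 1, 0 );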
//=== VK_EXT_opacity_micromap ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::MicromapEXT>::Type
Device::createMicromapEXT( VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::MicromapEXT micromap;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateMicromapEXT(
static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkMicromapCreateInfoEXT *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkMicromapEXT *>( &micromap ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
return VULKAN_HPP_UNEXPECTED( result );
# else
VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createMicromapEXT" );
# endif
}
return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::MicromapEXT( *this, *reinterpret_cast<VkMicromapEXT *>( &micromap ), allocator );
}
VULKAN_HPP_INLINE void CommandBuffer::buildMicromapsEXT(
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT> const & infos ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBuildMicromapsEXT && "Function <vkCmdBuildMicromapsEXT> requires <VK_EXT_opacity_micromap>" );
getDispatcher()->vkCmdBuildMicromapsEXT(
static_cast<VkCommandBuffer>( m_commandBuffer ), infos.size(), reinterpret_cast<const VkMicromapBuildInfoEXT *>( infos.data() ) );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result
Device::buildMicromapsEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT> const & infos ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkBuildMicromapsEXT && "Function <vkBuildMicromapsEXT> requires <VK_EXT_opacity_micromap>" );
VULKAN_HPP_NAMESPACE::Result result =
static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkBuildMicromapsEXT( static_cast<VkDevice>( m_device ),
static_cast<VkDeferredOperationKHR>( deferredOperation ),
infos.size(),
reinterpret_cast<const VkMicromapBuildInfoEXT *>( infos.data() ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result,
VULKAN_HPP_NAMESPACE_STRING "::Device::buildMicromapsEXT",
{ VULKAN_HPP_NAMESPACE::Result::eSuccess,
VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::copyMicromapEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT & info ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCopyMicromapEXT && "Function <vkCopyMicromapEXT> requires <VK_EXT_opacity_micromap>" );
VULKAN_HPP_NAMESPACE::Result result =
static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCopyMicromapEXT( static_cast<VkDevice>( m_device ),
static_cast<VkDeferredOperationKHR>( deferredOperation ),
reinterpret_cast<const VkCopyMicromapInfoEXT *>( &info ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result,
VULKAN_HPP_NAMESPACE_STRING "::Device::copyMicromapEXT",
{ VULKAN_HPP_NAMESPACE::Result::eSuccess,
VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result
Device::copyMicromapToMemoryEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT & info ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCopyMicromapToMemoryEXT && "Function <vkCopyMicromapToMemoryEXT> requires <VK_EXT_opacity_micromap>" );
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkCopyMicromapToMemoryEXT( static_cast<VkDevice>( m_device ),
static_cast<VkDeferredOperationKHR>( deferredOperation ),
reinterpret_cast<const VkCopyMicromapToMemoryInfoEXT *>( &info ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result,
VULKAN_HPP_NAMESPACE_STRING "::Device::copyMicromapToMemoryEXT",
{ VULKAN_HPP_NAMESPACE::Result::eSuccess,
VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result
Device::copyMemoryToMicromapEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT & info ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCopyMemoryToMicromapEXT && "Function <vkCopyMemoryToMicromapEXT> requires <VK_EXT_opacity_micromap>" );
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkCopyMemoryToMicromapEXT( static_cast<VkDevice>( m_device ),
static_cast<VkDeferredOperationKHR>( deferredOperation ),
reinterpret_cast<const VkCopyMemoryToMicromapInfoEXT *>( &info ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result,
VULKAN_HPP_NAMESPACE_STRING "::Device::copyMemoryToMicromapEXT",
{ VULKAN_HPP_NAMESPACE::Result::eSuccess,
VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
}
template <typename DataType>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<DataType>
Device::writeMicromapsPropertiesEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapEXT> const & micromaps,
VULKAN_HPP_NAMESPACE::QueryType queryType,
size_t dataSize,
size_t stride ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkWriteMicromapsPropertiesEXT && "Function <vkWriteMicromapsPropertiesEXT> requires <VK_EXT_opacity_micromap>" );
VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 );
std::vector<DataType> data( dataSize / sizeof( DataType ) );
VULKAN_HPP_NAMESPACE::Result result =
static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkWriteMicromapsPropertiesEXT( static_cast<VkDevice>( m_device ),
micromaps.size(),
reinterpret_cast<const VkMicromapEXT *>( micromaps.data() ),
static_cast<VkQueryType>( queryType ),
data.size() * sizeof( DataType ),
reinterpret_cast<void *>( data.data() ),
stride ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::writeMicromapsPropertiesEXT" );
return data;
}
template <typename DataType>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE DataType
Device::writeMicromapsPropertyEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapEXT> const & micromaps,
VULKAN_HPP_NAMESPACE::QueryType queryType,
size_t stride ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkWriteMicromapsPropertiesEXT && "Function <vkWriteMicromapsPropertiesEXT> requires <VK_EXT_opacity_micromap>" );
DataType data;
VULKAN_HPP_NAMESPACE::Result result =
static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkWriteMicromapsPropertiesEXT( static_cast<VkDevice>( m_device ),
micromaps.size(),
reinterpret_cast<const VkMicromapEXT *>( micromaps.data() ),
static_cast<VkQueryType>( queryType ),
sizeof( DataType ),
reinterpret_cast<void *>( &data ),
stride ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::writeMicromapsPropertyEXT" );
return data;
}
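    // Usage sketch (not part of the generated API; all names assumed): reading back the
    // serialization size of a single micromap with the single-value overload above.
    //
    //   auto size = device.writeMicromapsPropertyEXT<VULKAN_HPP_NAMESPACE::DeviceSize>(
    //     *micromap, VULKAN_HPP_NAMESPACE::QueryType::eMicromapSerializationSizeEXT, sizeof( VULKAN_HPP_NAMESPACE::DeviceSize ) );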
VULKAN_HPP_INLINE void CommandBuffer::copyMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT & info ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyMicromapEXT && "Function <vkCmdCopyMicromapEXT> requires <VK_EXT_opacity_micromap>" );
getDispatcher()->vkCmdCopyMicromapEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCopyMicromapInfoEXT *>( &info ) );
}
VULKAN_HPP_INLINE void CommandBuffer::copyMicromapToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT & info ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyMicromapToMemoryEXT && "Function <vkCmdCopyMicromapToMemoryEXT> requires <VK_EXT_opacity_micromap>" );
getDispatcher()->vkCmdCopyMicromapToMemoryEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
reinterpret_cast<const VkCopyMicromapToMemoryInfoEXT *>( &info ) );
}
VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT & info ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyMemoryToMicromapEXT && "Function <vkCmdCopyMemoryToMicromapEXT> requires <VK_EXT_opacity_micromap>" );
getDispatcher()->vkCmdCopyMemoryToMicromapEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
reinterpret_cast<const VkCopyMemoryToMicromapInfoEXT *>( &info ) );
}
VULKAN_HPP_INLINE void
CommandBuffer::writeMicromapsPropertiesEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapEXT> const & micromaps,
VULKAN_HPP_NAMESPACE::QueryType queryType,
VULKAN_HPP_NAMESPACE::QueryPool queryPool,
uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdWriteMicromapsPropertiesEXT &&
"Function <vkCmdWriteMicromapsPropertiesEXT> requires <VK_EXT_opacity_micromap>" );
getDispatcher()->vkCmdWriteMicromapsPropertiesEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
micromaps.size(),
reinterpret_cast<const VkMicromapEXT *>( micromaps.data() ),
static_cast<VkQueryType>( queryType ),
static_cast<VkQueryPool>( queryPool ),
firstQuery );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR
Device::getMicromapCompatibilityEXT( const VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT & versionInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceMicromapCompatibilityEXT &&
"Function <vkGetDeviceMicromapCompatibilityEXT> requires <VK_EXT_opacity_micromap>" );
VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR compatibility;
getDispatcher()->vkGetDeviceMicromapCompatibilityEXT( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkMicromapVersionInfoEXT *>( &versionInfo ),
reinterpret_cast<VkAccelerationStructureCompatibilityKHR *>( &compatibility ) );
return compatibility;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT
Device::getMicromapBuildSizesEXT( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType,
const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT & buildInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetMicromapBuildSizesEXT && "Function <vkGetMicromapBuildSizesEXT> requires <VK_EXT_opacity_micromap>" );
VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT sizeInfo;
getDispatcher()->vkGetMicromapBuildSizesEXT( static_cast<VkDevice>( m_device ),
static_cast<VkAccelerationStructureBuildTypeKHR>( buildType ),
reinterpret_cast<const VkMicromapBuildInfoEXT *>( &buildInfo ),
reinterpret_cast<VkMicromapBuildSizesInfoEXT *>( &sizeInfo ) );
return sizeInfo;
}
//=== VK_HUAWEI_cluster_culling_shader ===
VULKAN_HPP_INLINE void CommandBuffer::drawClusterHUAWEI( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawClusterHUAWEI && "Function <vkCmdDrawClusterHUAWEI> requires <VK_HUAWEI_cluster_culling_shader>" );
getDispatcher()->vkCmdDrawClusterHUAWEI( static_cast<VkCommandBuffer>( m_commandBuffer ), groupCountX, groupCountY, groupCountZ );
}
VULKAN_HPP_INLINE void CommandBuffer::drawClusterIndirectHUAWEI( VULKAN_HPP_NAMESPACE::Buffer buffer,
VULKAN_HPP_NAMESPACE::DeviceSize offset ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawClusterIndirectHUAWEI &&
"Function <vkCmdDrawClusterIndirectHUAWEI> requires <VK_HUAWEI_cluster_culling_shader>" );
getDispatcher()->vkCmdDrawClusterIndirectHUAWEI(
static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ) );
}
//=== VK_EXT_pageable_device_local_memory ===
VULKAN_HPP_INLINE void DeviceMemory::setPriorityEXT( float priority ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkSetDeviceMemoryPriorityEXT &&
"Function <vkSetDeviceMemoryPriorityEXT> requires <VK_EXT_pageable_device_local_memory>" );
getDispatcher()->vkSetDeviceMemoryPriorityEXT( static_cast<VkDevice>( m_device ), static_cast<VkDeviceMemory>( m_memory ), priority );
}
//=== VK_KHR_maintenance4 ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
Device::getBufferMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceBufferMemoryRequirementsKHR &&
"Function <vkGetDeviceBufferMemoryRequirementsKHR> requires <VK_KHR_maintenance4> or <VK_VERSION_1_3>" );
VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
getDispatcher()->vkGetDeviceBufferMemoryRequirementsKHR( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( &info ),
reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
return memoryRequirements;
}
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
Device::getBufferMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceBufferMemoryRequirementsKHR &&
"Function <vkGetDeviceBufferMemoryRequirementsKHR> requires <VK_KHR_maintenance4> or <VK_VERSION_1_3>" );
VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
getDispatcher()->vkGetDeviceBufferMemoryRequirementsKHR( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( &info ),
reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
return structureChain;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
Device::getImageMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceImageMemoryRequirementsKHR &&
"Function <vkGetDeviceImageMemoryRequirementsKHR> requires <VK_KHR_maintenance4> or <VK_VERSION_1_3>" );
VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
getDispatcher()->vkGetDeviceImageMemoryRequirementsKHR( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ),
reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
return memoryRequirements;
}
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
Device::getImageMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceImageMemoryRequirementsKHR &&
"Function <vkGetDeviceImageMemoryRequirementsKHR> requires <VK_KHR_maintenance4> or <VK_VERSION_1_3>" );
VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
getDispatcher()->vkGetDeviceImageMemoryRequirementsKHR( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ),
reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
return structureChain;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2>
Device::getImageSparseMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceImageSparseMemoryRequirementsKHR &&
"Function <vkGetDeviceImageSparseMemoryRequirementsKHR> requires <VK_KHR_maintenance4> or <VK_VERSION_1_3>" );
std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2> sparseMemoryRequirements;
uint32_t sparseMemoryRequirementCount;
getDispatcher()->vkGetDeviceImageSparseMemoryRequirementsKHR(
static_cast<VkDevice>( m_device ), reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), &sparseMemoryRequirementCount, nullptr );
sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
getDispatcher()->vkGetDeviceImageSparseMemoryRequirementsKHR( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ),
&sparseMemoryRequirementCount,
reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
{
sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
}
return sparseMemoryRequirements;
}
//=== VK_VALVE_descriptor_set_host_mapping ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE Device::getDescriptorSetLayoutHostMappingInfoVALVE(
const VULKAN_HPP_NAMESPACE::DescriptorSetBindingReferenceVALVE & bindingReference ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetDescriptorSetLayoutHostMappingInfoVALVE &&
"Function <vkGetDescriptorSetLayoutHostMappingInfoVALVE> requires <VK_VALVE_descriptor_set_host_mapping>" );
VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE hostMapping;
getDispatcher()->vkGetDescriptorSetLayoutHostMappingInfoVALVE( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkDescriptorSetBindingReferenceVALVE *>( &bindingReference ),
reinterpret_cast<VkDescriptorSetLayoutHostMappingInfoVALVE *>( &hostMapping ) );
return hostMapping;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE void * DescriptorSet::getHostMappingVALVE() const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetDescriptorSetHostMappingVALVE &&
"Function <vkGetDescriptorSetHostMappingVALVE> requires <VK_VALVE_descriptor_set_host_mapping>" );
void * pData;
getDispatcher()->vkGetDescriptorSetHostMappingVALVE( static_cast<VkDevice>( m_device ), static_cast<VkDescriptorSet>( m_descriptorSet ), &pData );
return pData;
}
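// Usage sketch, not part of the generated API: assuming the default vk namespace, a
// vk::raii::Device `device`, a vk::raii::DescriptorSetLayout `layout` and a
// vk::raii::DescriptorSet `descriptorSet` allocated with VK_VALVE_descriptor_set_host_mapping
// enabled, the two calls above combine to give direct CPU access to the descriptor payload:
//   vk::DescriptorSetBindingReferenceVALVE bindingReference{ *layout, 0 };
//   vk::DescriptorSetLayoutHostMappingInfoVALVE mappingInfo = device.getDescriptorSetLayoutHostMappingInfoVALVE( bindingReference );
//   void * payload = descriptorSet.getHostMappingVALVE();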
//=== VK_NV_copy_memory_indirect ===
VULKAN_HPP_INLINE void CommandBuffer::copyMemoryIndirectNV( VULKAN_HPP_NAMESPACE::DeviceAddress copyBufferAddress,
uint32_t copyCount,
uint32_t stride ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyMemoryIndirectNV && "Function <vkCmdCopyMemoryIndirectNV> requires <VK_NV_copy_memory_indirect>" );
getDispatcher()->vkCmdCopyMemoryIndirectNV(
static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkDeviceAddress>( copyBufferAddress ), copyCount, stride );
}
VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToImageIndirectNV(
VULKAN_HPP_NAMESPACE::DeviceAddress copyBufferAddress,
uint32_t stride,
VULKAN_HPP_NAMESPACE::Image dstImage,
VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceLayers> const & imageSubresources ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyMemoryToImageIndirectNV &&
"Function <vkCmdCopyMemoryToImageIndirectNV> requires <VK_NV_copy_memory_indirect>" );
getDispatcher()->vkCmdCopyMemoryToImageIndirectNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkDeviceAddress>( copyBufferAddress ),
imageSubresources.size(),
stride,
static_cast<VkImage>( dstImage ),
static_cast<VkImageLayout>( dstImageLayout ),
reinterpret_cast<const VkImageSubresourceLayers *>( imageSubresources.data() ) );
}
//=== VK_NV_memory_decompression ===
VULKAN_HPP_INLINE void CommandBuffer::decompressMemoryNV(
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DecompressMemoryRegionNV> const & decompressMemoryRegions ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDecompressMemoryNV && "Function <vkCmdDecompressMemoryNV> requires <VK_NV_memory_decompression>" );
getDispatcher()->vkCmdDecompressMemoryNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
decompressMemoryRegions.size(),
reinterpret_cast<const VkDecompressMemoryRegionNV *>( decompressMemoryRegions.data() ) );
}
VULKAN_HPP_INLINE void CommandBuffer::decompressMemoryIndirectCountNV( VULKAN_HPP_NAMESPACE::DeviceAddress indirectCommandsAddress,
VULKAN_HPP_NAMESPACE::DeviceAddress indirectCommandsCountAddress,
uint32_t stride ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDecompressMemoryIndirectCountNV &&
"Function <vkCmdDecompressMemoryIndirectCountNV> requires <VK_NV_memory_decompression>" );
getDispatcher()->vkCmdDecompressMemoryIndirectCountNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkDeviceAddress>( indirectCommandsAddress ),
static_cast<VkDeviceAddress>( indirectCommandsCountAddress ),
stride );
}
//=== VK_NV_device_generated_commands_compute ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
Device::getPipelineIndirectMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetPipelineIndirectMemoryRequirementsNV &&
"Function <vkGetPipelineIndirectMemoryRequirementsNV> requires <VK_NV_device_generated_commands_compute>" );
VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
getDispatcher()->vkGetPipelineIndirectMemoryRequirementsNV( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkComputePipelineCreateInfo *>( &createInfo ),
reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
return memoryRequirements;
}
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
Device::getPipelineIndirectMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetPipelineIndirectMemoryRequirementsNV &&
"Function <vkGetPipelineIndirectMemoryRequirementsNV> requires <VK_NV_device_generated_commands_compute>" );
VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
getDispatcher()->vkGetPipelineIndirectMemoryRequirementsNV( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkComputePipelineCreateInfo *>( &createInfo ),
reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
return structureChain;
}
VULKAN_HPP_INLINE void CommandBuffer::updatePipelineIndirectBufferNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
VULKAN_HPP_NAMESPACE::Pipeline pipeline ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdUpdatePipelineIndirectBufferNV &&
"Function <vkCmdUpdatePipelineIndirectBufferNV> requires <VK_NV_device_generated_commands_compute>" );
getDispatcher()->vkCmdUpdatePipelineIndirectBufferNV(
static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipeline>( pipeline ) );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress
Device::getPipelineIndirectAddressNV( const VULKAN_HPP_NAMESPACE::PipelineIndirectDeviceAddressInfoNV & info ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetPipelineIndirectDeviceAddressNV &&
"Function <vkGetPipelineIndirectDeviceAddressNV> requires <VK_NV_device_generated_commands_compute>" );
VkDeviceAddress result = getDispatcher()->vkGetPipelineIndirectDeviceAddressNV(
static_cast<VkDevice>( m_device ), reinterpret_cast<const VkPipelineIndirectDeviceAddressInfoNV *>( &info ) );
return static_cast<VULKAN_HPP_NAMESPACE::DeviceAddress>( result );
}
//=== VK_EXT_extended_dynamic_state3 ===
VULKAN_HPP_INLINE void CommandBuffer::setDepthClampEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthClampEnableEXT &&
"Function <vkCmdSetDepthClampEnableEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetDepthClampEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthClampEnable ) );
}
VULKAN_HPP_INLINE void CommandBuffer::setPolygonModeEXT( VULKAN_HPP_NAMESPACE::PolygonMode polygonMode ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetPolygonModeEXT &&
"Function <vkCmdSetPolygonModeEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetPolygonModeEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPolygonMode>( polygonMode ) );
}
VULKAN_HPP_INLINE void CommandBuffer::setRasterizationSamplesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetRasterizationSamplesEXT &&
"Function <vkCmdSetRasterizationSamplesEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetRasterizationSamplesEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkSampleCountFlagBits>( rasterizationSamples ) );
}
VULKAN_HPP_INLINE void CommandBuffer::setSampleMaskEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SampleMask> const & sampleMask ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetSampleMaskEXT &&
"Function <vkCmdSetSampleMaskEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
# ifdef VULKAN_HPP_NO_EXCEPTIONS
VULKAN_HPP_ASSERT( sampleMask.size() == ( static_cast<uint32_t>( samples ) + 31 ) / 32 );
# else
if ( sampleMask.size() != ( static_cast<uint32_t>( samples ) + 31 ) / 32 )
{
throw LogicError( VULKAN_HPP_NAMESPACE_STRING
"::CommandBuffer::setSampleMaskEXT: sampleMask.size() != ( static_cast<uint32_t>( samples ) + 31 ) / 32" );
}
# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
getDispatcher()->vkCmdSetSampleMaskEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkSampleCountFlagBits>( samples ),
reinterpret_cast<const VkSampleMask *>( sampleMask.data() ) );
}
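// Usage sketch, not part of the generated API: the proxy passed above must hold one 32-bit
// vk::SampleMask word per 32 samples, i.e. ( samples + 31 ) / 32 elements. Assuming a
// vk::raii::CommandBuffer `commandBuffer`, a single word covers sample counts up to 32:
//   vk::SampleMask mask = 0xFFu;  // all samples of a vk::SampleCountFlagBits::e8 target
//   commandBuffer.setSampleMaskEXT( vk::SampleCountFlagBits::e8, mask );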
VULKAN_HPP_INLINE void CommandBuffer::setAlphaToCoverageEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetAlphaToCoverageEnableEXT &&
"Function <vkCmdSetAlphaToCoverageEnableEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetAlphaToCoverageEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( alphaToCoverageEnable ) );
}
VULKAN_HPP_INLINE void CommandBuffer::setAlphaToOneEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetAlphaToOneEnableEXT &&
"Function <vkCmdSetAlphaToOneEnableEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetAlphaToOneEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( alphaToOneEnable ) );
}
VULKAN_HPP_INLINE void CommandBuffer::setLogicOpEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetLogicOpEnableEXT &&
"Function <vkCmdSetLogicOpEnableEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetLogicOpEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( logicOpEnable ) );
}
VULKAN_HPP_INLINE void CommandBuffer::setColorBlendEnableEXT(
uint32_t firstAttachment, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Bool32> const & colorBlendEnables ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetColorBlendEnableEXT &&
"Function <vkCmdSetColorBlendEnableEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetColorBlendEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
firstAttachment,
colorBlendEnables.size(),
reinterpret_cast<const VkBool32 *>( colorBlendEnables.data() ) );
}
VULKAN_HPP_INLINE void CommandBuffer::setColorBlendEquationEXT(
uint32_t firstAttachment,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ColorBlendEquationEXT> const & colorBlendEquations ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetColorBlendEquationEXT &&
"Function <vkCmdSetColorBlendEquationEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetColorBlendEquationEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
firstAttachment,
colorBlendEquations.size(),
reinterpret_cast<const VkColorBlendEquationEXT *>( colorBlendEquations.data() ) );
}
VULKAN_HPP_INLINE void CommandBuffer::setColorWriteMaskEXT(
uint32_t firstAttachment,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ColorComponentFlags> const & colorWriteMasks ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetColorWriteMaskEXT &&
"Function <vkCmdSetColorWriteMaskEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetColorWriteMaskEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
firstAttachment,
colorWriteMasks.size(),
reinterpret_cast<const VkColorComponentFlags *>( colorWriteMasks.data() ) );
}
VULKAN_HPP_INLINE void
CommandBuffer::setTessellationDomainOriginEXT( VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetTessellationDomainOriginEXT &&
"Function <vkCmdSetTessellationDomainOriginEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetTessellationDomainOriginEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkTessellationDomainOrigin>( domainOrigin ) );
}
VULKAN_HPP_INLINE void CommandBuffer::setRasterizationStreamEXT( uint32_t rasterizationStream ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetRasterizationStreamEXT &&
"Function <vkCmdSetRasterizationStreamEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetRasterizationStreamEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), rasterizationStream );
}
VULKAN_HPP_INLINE void CommandBuffer::setConservativeRasterizationModeEXT(
VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetConservativeRasterizationModeEXT &&
"Function <vkCmdSetConservativeRasterizationModeEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetConservativeRasterizationModeEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkConservativeRasterizationModeEXT>( conservativeRasterizationMode ) );
}
VULKAN_HPP_INLINE void CommandBuffer::setExtraPrimitiveOverestimationSizeEXT( float extraPrimitiveOverestimationSize ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetExtraPrimitiveOverestimationSizeEXT &&
"Function <vkCmdSetExtraPrimitiveOverestimationSizeEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetExtraPrimitiveOverestimationSizeEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), extraPrimitiveOverestimationSize );
}
VULKAN_HPP_INLINE void CommandBuffer::setDepthClipEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthClipEnableEXT &&
"Function <vkCmdSetDepthClipEnableEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetDepthClipEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthClipEnable ) );
}
VULKAN_HPP_INLINE void CommandBuffer::setSampleLocationsEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetSampleLocationsEnableEXT &&
"Function <vkCmdSetSampleLocationsEnableEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetSampleLocationsEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( sampleLocationsEnable ) );
}
VULKAN_HPP_INLINE void CommandBuffer::setColorBlendAdvancedEXT(
uint32_t firstAttachment,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ColorBlendAdvancedEXT> const & colorBlendAdvanced ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetColorBlendAdvancedEXT &&
"Function <vkCmdSetColorBlendAdvancedEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetColorBlendAdvancedEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
firstAttachment,
colorBlendAdvanced.size(),
reinterpret_cast<const VkColorBlendAdvancedEXT *>( colorBlendAdvanced.data() ) );
}
VULKAN_HPP_INLINE void
CommandBuffer::setProvokingVertexModeEXT( VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT provokingVertexMode ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetProvokingVertexModeEXT &&
"Function <vkCmdSetProvokingVertexModeEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetProvokingVertexModeEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkProvokingVertexModeEXT>( provokingVertexMode ) );
}
VULKAN_HPP_INLINE void
CommandBuffer::setLineRasterizationModeEXT( VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetLineRasterizationModeEXT &&
"Function <vkCmdSetLineRasterizationModeEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetLineRasterizationModeEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkLineRasterizationModeEXT>( lineRasterizationMode ) );
}
VULKAN_HPP_INLINE void CommandBuffer::setLineStippleEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetLineStippleEnableEXT &&
"Function <vkCmdSetLineStippleEnableEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetLineStippleEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( stippledLineEnable ) );
}
VULKAN_HPP_INLINE void CommandBuffer::setDepthClipNegativeOneToOneEXT( VULKAN_HPP_NAMESPACE::Bool32 negativeOneToOne ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthClipNegativeOneToOneEXT &&
"Function <vkCmdSetDepthClipNegativeOneToOneEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetDepthClipNegativeOneToOneEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( negativeOneToOne ) );
}
VULKAN_HPP_INLINE void CommandBuffer::setViewportWScalingEnableNV( VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetViewportWScalingEnableNV &&
"Function <vkCmdSetViewportWScalingEnableNV> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetViewportWScalingEnableNV( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( viewportWScalingEnable ) );
}
VULKAN_HPP_INLINE void CommandBuffer::setViewportSwizzleNV(
uint32_t firstViewport,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV> const & viewportSwizzles ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetViewportSwizzleNV &&
"Function <vkCmdSetViewportSwizzleNV> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetViewportSwizzleNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
firstViewport,
viewportSwizzles.size(),
reinterpret_cast<const VkViewportSwizzleNV *>( viewportSwizzles.data() ) );
}
VULKAN_HPP_INLINE void CommandBuffer::setCoverageToColorEnableNV( VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCoverageToColorEnableNV &&
"Function <vkCmdSetCoverageToColorEnableNV> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetCoverageToColorEnableNV( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( coverageToColorEnable ) );
}
VULKAN_HPP_INLINE void CommandBuffer::setCoverageToColorLocationNV( uint32_t coverageToColorLocation ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCoverageToColorLocationNV &&
"Function <vkCmdSetCoverageToColorLocationNV> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetCoverageToColorLocationNV( static_cast<VkCommandBuffer>( m_commandBuffer ), coverageToColorLocation );
}
VULKAN_HPP_INLINE void
CommandBuffer::setCoverageModulationModeNV( VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCoverageModulationModeNV &&
"Function <vkCmdSetCoverageModulationModeNV> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetCoverageModulationModeNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkCoverageModulationModeNV>( coverageModulationMode ) );
}
VULKAN_HPP_INLINE void
CommandBuffer::setCoverageModulationTableEnableNV( VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCoverageModulationTableEnableNV &&
"Function <vkCmdSetCoverageModulationTableEnableNV> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetCoverageModulationTableEnableNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkBool32>( coverageModulationTableEnable ) );
}
VULKAN_HPP_INLINE void
CommandBuffer::setCoverageModulationTableNV( VULKAN_HPP_NAMESPACE::ArrayProxy<const float> const & coverageModulationTable ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCoverageModulationTableNV &&
"Function <vkCmdSetCoverageModulationTableNV> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetCoverageModulationTableNV(
static_cast<VkCommandBuffer>( m_commandBuffer ), coverageModulationTable.size(), coverageModulationTable.data() );
}
VULKAN_HPP_INLINE void CommandBuffer::setShadingRateImageEnableNV( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetShadingRateImageEnableNV &&
"Function <vkCmdSetShadingRateImageEnableNV> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetShadingRateImageEnableNV( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( shadingRateImageEnable ) );
}
VULKAN_HPP_INLINE void
CommandBuffer::setRepresentativeFragmentTestEnableNV( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetRepresentativeFragmentTestEnableNV &&
"Function <vkCmdSetRepresentativeFragmentTestEnableNV> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetRepresentativeFragmentTestEnableNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkBool32>( representativeFragmentTestEnable ) );
}
VULKAN_HPP_INLINE void
CommandBuffer::setCoverageReductionModeNV( VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCoverageReductionModeNV &&
"Function <vkCmdSetCoverageReductionModeNV> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetCoverageReductionModeNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkCoverageReductionModeNV>( coverageReductionMode ) );
}
//=== VK_EXT_shader_module_identifier ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT ShaderModule::getIdentifierEXT() const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetShaderModuleIdentifierEXT &&
"Function <vkGetShaderModuleIdentifierEXT> requires <VK_EXT_shader_module_identifier>" );
VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT identifier;
getDispatcher()->vkGetShaderModuleIdentifierEXT(
static_cast<VkDevice>( m_device ), static_cast<VkShaderModule>( m_shaderModule ), reinterpret_cast<VkShaderModuleIdentifierEXT *>( &identifier ) );
return identifier;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT
Device::getShaderModuleCreateInfoIdentifierEXT( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetShaderModuleCreateInfoIdentifierEXT &&
"Function <vkGetShaderModuleCreateInfoIdentifierEXT> requires <VK_EXT_shader_module_identifier>" );
VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT identifier;
getDispatcher()->vkGetShaderModuleCreateInfoIdentifierEXT( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkShaderModuleCreateInfo *>( &createInfo ),
reinterpret_cast<VkShaderModuleIdentifierEXT *>( &identifier ) );
return identifier;
}
//=== VK_NV_optical_flow ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV>
PhysicalDevice::getOpticalFlowImageFormatsNV( const VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV & opticalFlowImageFormatInfo ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceOpticalFlowImageFormatsNV &&
"Function <vkGetPhysicalDeviceOpticalFlowImageFormatsNV> requires <VK_NV_optical_flow>" );
std::vector<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV> imageFormatProperties;
uint32_t formatCount;
VULKAN_HPP_NAMESPACE::Result result;
do
{
result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetPhysicalDeviceOpticalFlowImageFormatsNV(
static_cast<VkPhysicalDevice>( m_physicalDevice ),
reinterpret_cast<const VkOpticalFlowImageFormatInfoNV *>( &opticalFlowImageFormatInfo ),
&formatCount,
nullptr ) );
if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && formatCount )
{
imageFormatProperties.resize( formatCount );
result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetPhysicalDeviceOpticalFlowImageFormatsNV(
static_cast<VkPhysicalDevice>( m_physicalDevice ),
reinterpret_cast<const VkOpticalFlowImageFormatInfoNV *>( &opticalFlowImageFormatInfo ),
&formatCount,
reinterpret_cast<VkOpticalFlowImageFormatPropertiesNV *>( imageFormatProperties.data() ) ) );
}
} while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getOpticalFlowImageFormatsNV" );
VULKAN_HPP_ASSERT( formatCount <= imageFormatProperties.size() );
if ( formatCount < imageFormatProperties.size() )
{
imageFormatProperties.resize( formatCount );
}
return imageFormatProperties;
}
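// Usage sketch, not part of the generated API: assuming the default vk namespace and a
// vk::raii::PhysicalDevice `physicalDevice`, the enumeration above is driven by the intended
// optical-flow usage:
//   vk::OpticalFlowImageFormatInfoNV formatInfo{ vk::OpticalFlowUsageFlagBitsNV::eInput };
//   auto formats = physicalDevice.getOpticalFlowImageFormatsNV( formatInfo );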
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::OpticalFlowSessionNV>::Type
Device::createOpticalFlowSessionNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateOpticalFlowSessionNV(
static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkOpticalFlowSessionCreateInfoNV *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkOpticalFlowSessionNV *>( &session ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
return VULKAN_HPP_UNEXPECTED( result );
# else
VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createOpticalFlowSessionNV" );
# endif
}
return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::OpticalFlowSessionNV( *this, *reinterpret_cast<VkOpticalFlowSessionNV *>( &session ), allocator );
}
VULKAN_HPP_INLINE void OpticalFlowSessionNV::bindImage( VULKAN_HPP_NAMESPACE::OpticalFlowSessionBindingPointNV bindingPoint,
VULKAN_HPP_NAMESPACE::ImageView view,
VULKAN_HPP_NAMESPACE::ImageLayout layout ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkBindOpticalFlowSessionImageNV && "Function <vkBindOpticalFlowSessionImageNV> requires <VK_NV_optical_flow>" );
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkBindOpticalFlowSessionImageNV( static_cast<VkDevice>( m_device ),
static_cast<VkOpticalFlowSessionNV>( m_session ),
static_cast<VkOpticalFlowSessionBindingPointNV>( bindingPoint ),
static_cast<VkImageView>( view ),
static_cast<VkImageLayout>( layout ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::OpticalFlowSessionNV::bindImage" );
}
VULKAN_HPP_INLINE void CommandBuffer::opticalFlowExecuteNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session,
const VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV & executeInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdOpticalFlowExecuteNV && "Function <vkCmdOpticalFlowExecuteNV> requires <VK_NV_optical_flow>" );
getDispatcher()->vkCmdOpticalFlowExecuteNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkOpticalFlowSessionNV>( session ),
reinterpret_cast<const VkOpticalFlowExecuteInfoNV *>( &executeInfo ) );
}
//=== VK_KHR_maintenance5 ===
VULKAN_HPP_INLINE void CommandBuffer::bindIndexBuffer2KHR( VULKAN_HPP_NAMESPACE::Buffer buffer,
VULKAN_HPP_NAMESPACE::DeviceSize offset,
VULKAN_HPP_NAMESPACE::DeviceSize size,
VULKAN_HPP_NAMESPACE::IndexType indexType ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindIndexBuffer2KHR && "Function <vkCmdBindIndexBuffer2KHR> requires <VK_KHR_maintenance5>" );
getDispatcher()->vkCmdBindIndexBuffer2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkBuffer>( buffer ),
static_cast<VkDeviceSize>( offset ),
static_cast<VkDeviceSize>( size ),
static_cast<VkIndexType>( indexType ) );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Extent2D
Device::getRenderingAreaGranularityKHR( const VULKAN_HPP_NAMESPACE::RenderingAreaInfoKHR & renderingAreaInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetRenderingAreaGranularityKHR && "Function <vkGetRenderingAreaGranularityKHR> requires <VK_KHR_maintenance5>" );
VULKAN_HPP_NAMESPACE::Extent2D granularity;
getDispatcher()->vkGetRenderingAreaGranularityKHR( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkRenderingAreaInfoKHR *>( &renderingAreaInfo ),
reinterpret_cast<VkExtent2D *>( &granularity ) );
return granularity;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR
Device::getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR & info ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceImageSubresourceLayoutKHR &&
"Function <vkGetDeviceImageSubresourceLayoutKHR> requires <VK_KHR_maintenance5>" );
VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR layout;
getDispatcher()->vkGetDeviceImageSubresourceLayoutKHR( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkDeviceImageSubresourceInfoKHR *>( &info ),
reinterpret_cast<VkSubresourceLayout2KHR *>( &layout ) );
return layout;
}
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
Device::getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR & info ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceImageSubresourceLayoutKHR &&
"Function <vkGetDeviceImageSubresourceLayoutKHR> requires <VK_KHR_maintenance5>" );
VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR & layout = structureChain.template get<VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR>();
getDispatcher()->vkGetDeviceImageSubresourceLayoutKHR( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkDeviceImageSubresourceInfoKHR *>( &info ),
reinterpret_cast<VkSubresourceLayout2KHR *>( &layout ) );
return structureChain;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR
Image::getSubresourceLayout2KHR( const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT(
getDispatcher()->vkGetImageSubresourceLayout2KHR &&
"Function <vkGetImageSubresourceLayout2KHR> requires <VK_EXT_host_image_copy> or <VK_EXT_image_compression_control> or <VK_KHR_maintenance5>" );
VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR layout;
getDispatcher()->vkGetImageSubresourceLayout2KHR( static_cast<VkDevice>( m_device ),
static_cast<VkImage>( m_image ),
reinterpret_cast<const VkImageSubresource2KHR *>( &subresource ),
reinterpret_cast<VkSubresourceLayout2KHR *>( &layout ) );
return layout;
}
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
Image::getSubresourceLayout2KHR( const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT(
getDispatcher()->vkGetImageSubresourceLayout2KHR &&
"Function <vkGetImageSubresourceLayout2KHR> requires <VK_EXT_host_image_copy> or <VK_EXT_image_compression_control> or <VK_KHR_maintenance5>" );
VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR & layout = structureChain.template get<VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR>();
getDispatcher()->vkGetImageSubresourceLayout2KHR( static_cast<VkDevice>( m_device ),
static_cast<VkImage>( m_image ),
reinterpret_cast<const VkImageSubresource2KHR *>( &subresource ),
reinterpret_cast<VkSubresourceLayout2KHR *>( &layout ) );
return structureChain;
}
//=== VK_AMD_anti_lag ===
VULKAN_HPP_INLINE void Device::antiLagUpdateAMD( const VULKAN_HPP_NAMESPACE::AntiLagDataAMD & data ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkAntiLagUpdateAMD && "Function <vkAntiLagUpdateAMD> requires <VK_AMD_anti_lag>" );
getDispatcher()->vkAntiLagUpdateAMD( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkAntiLagDataAMD *>( &data ) );
}
//=== VK_EXT_shader_object ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<std::vector<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ShaderEXT>>::Type
Device::createShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT> const & createInfos,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
{
std::vector<VULKAN_HPP_NAMESPACE::ShaderEXT> shaders( createInfos.size() );
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateShadersEXT(
static_cast<VkDevice>( m_device ),
createInfos.size(),
reinterpret_cast<const VkShaderCreateInfoEXT *>( createInfos.data() ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkShaderEXT *>( shaders.data() ) ) );
if ( ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) && ( result != VULKAN_HPP_NAMESPACE::Result::eIncompatibleShaderBinaryEXT ) )
{
# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
return VULKAN_HPP_UNEXPECTED( result );
# else
VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createShadersEXT" );
# endif
}
std::vector<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ShaderEXT> shadersRAII;
shadersRAII.reserve( shaders.size() );
for ( auto & shader : shaders )
{
shadersRAII.emplace_back( *this, *reinterpret_cast<VkShaderEXT *>( &shader ), allocator, result );
}
return shadersRAII;
}
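// Usage sketch, not part of the generated API (default, exception-enabled configuration):
// assuming the default vk namespace, a vk::raii::Device `device` and two filled
// vk::ShaderCreateInfoEXT structures `vertexInfo` and `fragmentInfo`, both carrying
// vk::ShaderCreateFlagBitsEXT::eLinkStage, linked shader objects are created in one call and
// each element of the returned vector owns one shader:
//   std::vector<vk::raii::ShaderEXT> shaders = device.createShadersEXT( { vertexInfo, fragmentInfo } );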
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ShaderEXT>::Type
Device::createShaderEXT( VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::ShaderEXT shader;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateShadersEXT(
static_cast<VkDevice>( m_device ),
1,
reinterpret_cast<const VkShaderCreateInfoEXT *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkShaderEXT *>( &shader ) ) );
if ( ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) && ( result != VULKAN_HPP_NAMESPACE::Result::eIncompatibleShaderBinaryEXT ) )
{
# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
return VULKAN_HPP_UNEXPECTED( result );
# else
VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createShaderEXT" );
# endif
}
return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ShaderEXT( *this, *reinterpret_cast<VkShaderEXT *>( &shader ), allocator, result );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<uint8_t> ShaderEXT::getBinaryData() const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetShaderBinaryDataEXT && "Function <vkGetShaderBinaryDataEXT> requires <VK_EXT_shader_object>" );
std::vector<uint8_t> data;
size_t dataSize;
VULKAN_HPP_NAMESPACE::Result result;
do
{
result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkGetShaderBinaryDataEXT( static_cast<VkDevice>( m_device ), static_cast<VkShaderEXT>( m_shader ), &dataSize, nullptr ) );
if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && dataSize )
{
data.resize( dataSize );
result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetShaderBinaryDataEXT(
static_cast<VkDevice>( m_device ), static_cast<VkShaderEXT>( m_shader ), &dataSize, reinterpret_cast<void *>( data.data() ) ) );
}
} while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::ShaderEXT::getBinaryData" );
VULKAN_HPP_ASSERT( dataSize <= data.size() );
if ( dataSize < data.size() )
{
data.resize( dataSize );
}
return data;
}
VULKAN_HPP_INLINE void CommandBuffer::bindShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderStageFlagBits> const & stages,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderEXT> const & shaders ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindShadersEXT && "Function <vkCmdBindShadersEXT> requires <VK_EXT_shader_object>" );
# ifdef VULKAN_HPP_NO_EXCEPTIONS
VULKAN_HPP_ASSERT( stages.size() == shaders.size() );
# else
if ( stages.size() != shaders.size() )
{
throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindShadersEXT: stages.size() != shaders.size()" );
}
# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
getDispatcher()->vkCmdBindShadersEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
stages.size(),
reinterpret_cast<const VkShaderStageFlagBits *>( stages.data() ),
reinterpret_cast<const VkShaderEXT *>( shaders.data() ) );
}
VULKAN_HPP_INLINE void
CommandBuffer::setDepthClampRangeEXT( VULKAN_HPP_NAMESPACE::DepthClampModeEXT depthClampMode,
Optional<const VULKAN_HPP_NAMESPACE::DepthClampRangeEXT> depthClampRange ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthClampRangeEXT &&
"Function <vkCmdSetDepthClampRangeEXT> requires <VK_EXT_depth_clamp_control> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetDepthClampRangeEXT(
static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkDepthClampModeEXT>( depthClampMode ),
reinterpret_cast<const VkDepthClampRangeEXT *>( static_cast<const VULKAN_HPP_NAMESPACE::DepthClampRangeEXT *>( depthClampRange ) ) );
}
//=== VK_KHR_pipeline_binary ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<
std::vector<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineBinaryKHR>>::Type
Device::createPipelineBinariesKHR( VULKAN_HPP_NAMESPACE::PipelineBinaryCreateInfoKHR const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
{
std::vector<VULKAN_HPP_NAMESPACE::PipelineBinaryKHR> pipelineBinaries;
VULKAN_HPP_NAMESPACE::PipelineBinaryHandlesInfoKHR binaries;
VULKAN_HPP_NAMESPACE::Result result;
if ( createInfo.pKeysAndDataInfo )
{
VULKAN_HPP_ASSERT( !createInfo.pipeline && !createInfo.pPipelineCreateInfo );
pipelineBinaries.resize( createInfo.pKeysAndDataInfo->binaryCount );
binaries.pipelineBinaryCount = createInfo.pKeysAndDataInfo->binaryCount;
binaries.pPipelineBinaries = pipelineBinaries.data();
result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreatePipelineBinariesKHR(
static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkPipelineBinaryCreateInfoKHR *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkPipelineBinaryHandlesInfoKHR *>( &binaries ) ) );
}
else
{
VULKAN_HPP_ASSERT( !createInfo.pipeline ^ !createInfo.pPipelineCreateInfo );
result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreatePipelineBinariesKHR(
static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkPipelineBinaryCreateInfoKHR *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkPipelineBinaryHandlesInfoKHR *>( &binaries ) ) );
if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
pipelineBinaries.resize( binaries.pipelineBinaryCount );
binaries.pPipelineBinaries = pipelineBinaries.data();
result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreatePipelineBinariesKHR(
static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkPipelineBinaryCreateInfoKHR *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkPipelineBinaryHandlesInfoKHR *>( &binaries ) ) );
}
}
if ( ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) && ( result != VULKAN_HPP_NAMESPACE::Result::eIncomplete ) &&
( result != VULKAN_HPP_NAMESPACE::Result::ePipelineBinaryMissingKHR ) )
{
# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
return VULKAN_HPP_UNEXPECTED( result );
# else
VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createPipelineBinariesKHR" );
# endif
}
std::vector<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineBinaryKHR> pipelineBinariesRAII;
pipelineBinariesRAII.reserve( pipelineBinaries.size() );
for ( auto & pipelineBinary : pipelineBinaries )
{
pipelineBinariesRAII.emplace_back( *this, *reinterpret_cast<VkPipelineBinaryKHR *>( &pipelineBinary ), allocator, result );
}
return pipelineBinariesRAII;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PipelineBinaryKeyKHR
Device::getPipelineKeyKHR( Optional<const VULKAN_HPP_NAMESPACE::PipelineCreateInfoKHR> pipelineCreateInfo ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetPipelineKeyKHR && "Function <vkGetPipelineKeyKHR> requires <VK_KHR_pipeline_binary>" );
VULKAN_HPP_NAMESPACE::PipelineBinaryKeyKHR pipelineKey;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetPipelineKeyKHR(
static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkPipelineCreateInfoKHR *>( static_cast<const VULKAN_HPP_NAMESPACE::PipelineCreateInfoKHR *>( pipelineCreateInfo ) ),
reinterpret_cast<VkPipelineBinaryKeyKHR *>( &pipelineKey ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineKeyKHR" );
return pipelineKey;
}
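// Usage sketch, not part of the generated API: per VK_KHR_pipeline_binary, passing nullptr for
// the optional create info queries the device's global pipeline key, while passing a
// vk::PipelineCreateInfoKHR yields the key of that specific pipeline. Assuming a
// vk::raii::Device `device`:
//   vk::PipelineBinaryKeyKHR globalKey = device.getPipelineKeyKHR( nullptr );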
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::pair<VULKAN_HPP_NAMESPACE::PipelineBinaryKeyKHR, std::vector<uint8_t>>
Device::getPipelineBinaryDataKHR( const VULKAN_HPP_NAMESPACE::PipelineBinaryDataInfoKHR & info ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetPipelineBinaryDataKHR && "Function <vkGetPipelineBinaryDataKHR> requires <VK_KHR_pipeline_binary>" );
std::pair<VULKAN_HPP_NAMESPACE::PipelineBinaryKeyKHR, std::vector<uint8_t>> data_;
VULKAN_HPP_NAMESPACE::PipelineBinaryKeyKHR & pipelineBinaryKey = data_.first;
std::vector<uint8_t> & pipelineBinaryData = data_.second;
size_t pipelineBinaryDataSize;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkGetPipelineBinaryDataKHR( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkPipelineBinaryDataInfoKHR *>( &info ),
reinterpret_cast<VkPipelineBinaryKeyKHR *>( &pipelineBinaryKey ),
&pipelineBinaryDataSize,
nullptr ) );
if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
pipelineBinaryData.resize( pipelineBinaryDataSize );
result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkGetPipelineBinaryDataKHR( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkPipelineBinaryDataInfoKHR *>( &info ),
reinterpret_cast<VkPipelineBinaryKeyKHR *>( &pipelineBinaryKey ),
&pipelineBinaryDataSize,
reinterpret_cast<void *>( pipelineBinaryData.data() ) ) );
}
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineBinaryDataKHR" );
return data_;
}
VULKAN_HPP_INLINE void
Device::releaseCapturedPipelineDataKHR( const VULKAN_HPP_NAMESPACE::ReleaseCapturedPipelineDataInfoKHR & info,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkReleaseCapturedPipelineDataKHR && "Function <vkReleaseCapturedPipelineDataKHR> requires <VK_KHR_pipeline_binary>" );
getDispatcher()->vkReleaseCapturedPipelineDataKHR(
static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkReleaseCapturedPipelineDataInfoKHR *>( &info ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
//=== VK_QCOM_tile_properties ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::TilePropertiesQCOM> Framebuffer::getTilePropertiesQCOM() const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetFramebufferTilePropertiesQCOM &&
"Function <vkGetFramebufferTilePropertiesQCOM> requires <VK_QCOM_tile_properties>" );
std::vector<VULKAN_HPP_NAMESPACE::TilePropertiesQCOM> properties;
uint32_t propertiesCount;
VULKAN_HPP_NAMESPACE::Result result;
do
{
result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetFramebufferTilePropertiesQCOM(
static_cast<VkDevice>( m_device ), static_cast<VkFramebuffer>( m_framebuffer ), &propertiesCount, nullptr ) );
if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertiesCount )
{
properties.resize( propertiesCount );
result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkGetFramebufferTilePropertiesQCOM( static_cast<VkDevice>( m_device ),
static_cast<VkFramebuffer>( m_framebuffer ),
&propertiesCount,
reinterpret_cast<VkTilePropertiesQCOM *>( properties.data() ) ) );
}
} while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
VULKAN_HPP_ASSERT( propertiesCount <= properties.size() );
if ( propertiesCount < properties.size() )
{
properties.resize( propertiesCount );
}
return properties;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::TilePropertiesQCOM
Device::getDynamicRenderingTilePropertiesQCOM( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetDynamicRenderingTilePropertiesQCOM &&
"Function <vkGetDynamicRenderingTilePropertiesQCOM> requires <VK_QCOM_tile_properties>" );
VULKAN_HPP_NAMESPACE::TilePropertiesQCOM properties;
getDispatcher()->vkGetDynamicRenderingTilePropertiesQCOM( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkRenderingInfo *>( &renderingInfo ),
reinterpret_cast<VkTilePropertiesQCOM *>( &properties ) );
return properties;
}
//=== VK_NV_low_latency2 ===
VULKAN_HPP_INLINE void SwapchainKHR::setLatencySleepModeNV( const VULKAN_HPP_NAMESPACE::LatencySleepModeInfoNV & sleepModeInfo ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkSetLatencySleepModeNV && "Function <vkSetLatencySleepModeNV> requires <VK_NV_low_latency2>" );
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkSetLatencySleepModeNV(
static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( m_swapchain ), reinterpret_cast<const VkLatencySleepModeInfoNV *>( &sleepModeInfo ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::setLatencySleepModeNV" );
}
VULKAN_HPP_INLINE void SwapchainKHR::latencySleepNV( const VULKAN_HPP_NAMESPACE::LatencySleepInfoNV & sleepInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkLatencySleepNV && "Function <vkLatencySleepNV> requires <VK_NV_low_latency2>" );
getDispatcher()->vkLatencySleepNV(
static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( m_swapchain ), reinterpret_cast<const VkLatencySleepInfoNV *>( &sleepInfo ) );
}
VULKAN_HPP_INLINE void SwapchainKHR::setLatencyMarkerNV( const VULKAN_HPP_NAMESPACE::SetLatencyMarkerInfoNV & latencyMarkerInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkSetLatencyMarkerNV && "Function <vkSetLatencyMarkerNV> requires <VK_NV_low_latency2>" );
getDispatcher()->vkSetLatencyMarkerNV( static_cast<VkDevice>( m_device ),
static_cast<VkSwapchainKHR>( m_swapchain ),
reinterpret_cast<const VkSetLatencyMarkerInfoNV *>( &latencyMarkerInfo ) );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::LatencyTimingsFrameReportNV> SwapchainKHR::getLatencyTimingsNV() const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetLatencyTimingsNV && "Function <vkGetLatencyTimingsNV> requires <VK_NV_low_latency2>" );
std::vector<VULKAN_HPP_NAMESPACE::LatencyTimingsFrameReportNV> timings;
VULKAN_HPP_NAMESPACE::GetLatencyMarkerInfoNV latencyMarkerInfo;
getDispatcher()->vkGetLatencyTimingsNV(
static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( m_swapchain ), reinterpret_cast<VkGetLatencyMarkerInfoNV *>( &latencyMarkerInfo ) );
timings.resize( latencyMarkerInfo.timingCount );
latencyMarkerInfo.pTimings = timings.data();
getDispatcher()->vkGetLatencyTimingsNV(
static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( m_swapchain ), reinterpret_cast<VkGetLatencyMarkerInfoNV *>( &latencyMarkerInfo ) );
return timings;
}
VULKAN_HPP_INLINE void Queue::notifyOutOfBandNV( const VULKAN_HPP_NAMESPACE::OutOfBandQueueTypeInfoNV & queueTypeInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkQueueNotifyOutOfBandNV && "Function <vkQueueNotifyOutOfBandNV> requires <VK_NV_low_latency2>" );
getDispatcher()->vkQueueNotifyOutOfBandNV( static_cast<VkQueue>( m_queue ), reinterpret_cast<const VkOutOfBandQueueTypeInfoNV *>( &queueTypeInfo ) );
}
//=== VK_KHR_cooperative_matrix ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesKHR>
PhysicalDevice::getCooperativeMatrixPropertiesKHR() const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR &&
"Function <vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR> requires <VK_KHR_cooperative_matrix>" );
std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesKHR> properties;
uint32_t propertyCount;
VULKAN_HPP_NAMESPACE::Result result;
do
{
result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), &propertyCount, nullptr ) );
if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount )
{
properties.resize( propertyCount );
result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR(
static_cast<VkPhysicalDevice>( m_physicalDevice ), &propertyCount, reinterpret_cast<VkCooperativeMatrixPropertiesKHR *>( properties.data() ) ) );
}
} while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixPropertiesKHR" );
VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
if ( propertyCount < properties.size() )
{
properties.resize( propertyCount );
}
return properties;
}
//=== VK_EXT_attachment_feedback_loop_dynamic_state ===
VULKAN_HPP_INLINE void CommandBuffer::setAttachmentFeedbackLoopEnableEXT( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetAttachmentFeedbackLoopEnableEXT &&
"Function <vkCmdSetAttachmentFeedbackLoopEnableEXT> requires <VK_EXT_attachment_feedback_loop_dynamic_state>" );
getDispatcher()->vkCmdSetAttachmentFeedbackLoopEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkImageAspectFlags>( aspectMask ) );
}
# if defined( VK_USE_PLATFORM_SCREEN_QNX )
//=== VK_QNX_external_memory_screen_buffer ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX
Device::getScreenBufferPropertiesQNX( const struct _screen_buffer & buffer ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetScreenBufferPropertiesQNX &&
"Function <vkGetScreenBufferPropertiesQNX> requires <VK_QNX_external_memory_screen_buffer>" );
VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX properties;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetScreenBufferPropertiesQNX(
static_cast<VkDevice>( m_device ), &buffer, reinterpret_cast<VkScreenBufferPropertiesQNX *>( &properties ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getScreenBufferPropertiesQNX" );
return properties;
}
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
Device::getScreenBufferPropertiesQNX( const struct _screen_buffer & buffer ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetScreenBufferPropertiesQNX &&
"Function <vkGetScreenBufferPropertiesQNX> requires <VK_QNX_external_memory_screen_buffer>" );
StructureChain<X, Y, Z...> structureChain;
VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX & properties = structureChain.template get<VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX>();
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetScreenBufferPropertiesQNX(
static_cast<VkDevice>( m_device ), &buffer, reinterpret_cast<VkScreenBufferPropertiesQNX *>( &properties ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getScreenBufferPropertiesQNX" );
return structureChain;
}
# endif /*VK_USE_PLATFORM_SCREEN_QNX*/
//=== VK_KHR_line_rasterization ===
VULKAN_HPP_INLINE void CommandBuffer::setLineStippleKHR( uint32_t lineStippleFactor, uint16_t lineStipplePattern ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetLineStippleKHR &&
"Function <vkCmdSetLineStippleKHR> requires <VK_EXT_line_rasterization> or <VK_KHR_line_rasterization>" );
getDispatcher()->vkCmdSetLineStippleKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), lineStippleFactor, lineStipplePattern );
}
//=== VK_KHR_calibrated_timestamps ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::TimeDomainKHR> PhysicalDevice::getCalibrateableTimeDomainsKHR() const
{
VULKAN_HPP_ASSERT(
getDispatcher()->vkGetPhysicalDeviceCalibrateableTimeDomainsKHR &&
"Function <vkGetPhysicalDeviceCalibrateableTimeDomainsKHR> requires <VK_EXT_calibrated_timestamps> or <VK_KHR_calibrated_timestamps>" );
std::vector<VULKAN_HPP_NAMESPACE::TimeDomainKHR> timeDomains;
uint32_t timeDomainCount;
VULKAN_HPP_NAMESPACE::Result result;
do
{
result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), &timeDomainCount, nullptr ) );
if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && timeDomainCount )
{
timeDomains.resize( timeDomainCount );
result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetPhysicalDeviceCalibrateableTimeDomainsKHR(
static_cast<VkPhysicalDevice>( m_physicalDevice ), &timeDomainCount, reinterpret_cast<VkTimeDomainKHR *>( timeDomains.data() ) ) );
}
} while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCalibrateableTimeDomainsKHR" );
VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() );
if ( timeDomainCount < timeDomains.size() )
{
timeDomains.resize( timeDomainCount );
}
return timeDomains;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::pair<std::vector<uint64_t>, uint64_t> Device::getCalibratedTimestampsKHR(
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR> const & timestampInfos ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetCalibratedTimestampsKHR &&
"Function <vkGetCalibratedTimestampsKHR> requires <VK_EXT_calibrated_timestamps> or <VK_KHR_calibrated_timestamps>" );
std::pair<std::vector<uint64_t>, uint64_t> data_( std::piecewise_construct, std::forward_as_tuple( timestampInfos.size() ), std::forward_as_tuple( 0 ) );
std::vector<uint64_t> & timestamps = data_.first;
uint64_t & maxDeviation = data_.second;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkGetCalibratedTimestampsKHR( static_cast<VkDevice>( m_device ),
timestampInfos.size(),
reinterpret_cast<const VkCalibratedTimestampInfoKHR *>( timestampInfos.data() ),
timestamps.data(),
&maxDeviation ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsKHR" );
return data_;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::pair<uint64_t, uint64_t>
Device::getCalibratedTimestampKHR( const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR & timestampInfo ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetCalibratedTimestampsKHR &&
"Function <vkGetCalibratedTimestampsKHR> requires <VK_EXT_calibrated_timestamps> or <VK_KHR_calibrated_timestamps>" );
std::pair<uint64_t, uint64_t> data_;
uint64_t & timestamp = data_.first;
uint64_t & maxDeviation = data_.second;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetCalibratedTimestampsKHR(
static_cast<VkDevice>( m_device ), 1, reinterpret_cast<const VkCalibratedTimestampInfoKHR *>( &timestampInfo ), &timestamp, &maxDeviation ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampKHR" );
return data_;
}
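    // Usage sketch (assumptions: the default `vk` / `vk::raii` namespace names, a vk::raii::Device named
    // `device` created with VK_KHR_calibrated_timestamps enabled, and time domains previously reported by
    // getCalibrateableTimeDomainsKHR above). The batched overload returns the timestamps together with the
    // maximum deviation in nanoseconds:
    //
    //   std::array<vk::CalibratedTimestampInfoKHR, 2> infos = { vk::CalibratedTimestampInfoKHR( vk::TimeDomainKHR::eDevice ),
    //                                                           vk::CalibratedTimestampInfoKHR( vk::TimeDomainKHR::eClockMonotonic ) };
    //   auto [timestamps, maxDeviation] = device.getCalibratedTimestampsKHR( infos );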
//=== VK_KHR_maintenance6 ===
VULKAN_HPP_INLINE void
CommandBuffer::bindDescriptorSets2KHR( const VULKAN_HPP_NAMESPACE::BindDescriptorSetsInfoKHR & bindDescriptorSetsInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindDescriptorSets2KHR && "Function <vkCmdBindDescriptorSets2KHR> requires <VK_KHR_maintenance6>" );
getDispatcher()->vkCmdBindDescriptorSets2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
reinterpret_cast<const VkBindDescriptorSetsInfoKHR *>( &bindDescriptorSetsInfo ) );
}
VULKAN_HPP_INLINE void CommandBuffer::pushConstants2KHR( const VULKAN_HPP_NAMESPACE::PushConstantsInfoKHR & pushConstantsInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdPushConstants2KHR && "Function <vkCmdPushConstants2KHR> requires <VK_KHR_maintenance6>" );
getDispatcher()->vkCmdPushConstants2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
reinterpret_cast<const VkPushConstantsInfoKHR *>( &pushConstantsInfo ) );
}
VULKAN_HPP_INLINE void
CommandBuffer::pushDescriptorSet2KHR( const VULKAN_HPP_NAMESPACE::PushDescriptorSetInfoKHR & pushDescriptorSetInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdPushDescriptorSet2KHR && "Function <vkCmdPushDescriptorSet2KHR> requires <VK_KHR_maintenance6>" );
getDispatcher()->vkCmdPushDescriptorSet2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
reinterpret_cast<const VkPushDescriptorSetInfoKHR *>( &pushDescriptorSetInfo ) );
}
VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplate2KHR(
const VULKAN_HPP_NAMESPACE::PushDescriptorSetWithTemplateInfoKHR & pushDescriptorSetWithTemplateInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdPushDescriptorSetWithTemplate2KHR &&
"Function <vkCmdPushDescriptorSetWithTemplate2KHR> requires <VK_KHR_maintenance6>" );
getDispatcher()->vkCmdPushDescriptorSetWithTemplate2KHR(
static_cast<VkCommandBuffer>( m_commandBuffer ),
reinterpret_cast<const VkPushDescriptorSetWithTemplateInfoKHR *>( &pushDescriptorSetWithTemplateInfo ) );
}
VULKAN_HPP_INLINE void CommandBuffer::setDescriptorBufferOffsets2EXT(
const VULKAN_HPP_NAMESPACE::SetDescriptorBufferOffsetsInfoEXT & setDescriptorBufferOffsetsInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDescriptorBufferOffsets2EXT &&
"Function <vkCmdSetDescriptorBufferOffsets2EXT> requires <VK_KHR_maintenance6>" );
getDispatcher()->vkCmdSetDescriptorBufferOffsets2EXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
reinterpret_cast<const VkSetDescriptorBufferOffsetsInfoEXT *>( &setDescriptorBufferOffsetsInfo ) );
}
VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorBufferEmbeddedSamplers2EXT(
const VULKAN_HPP_NAMESPACE::BindDescriptorBufferEmbeddedSamplersInfoEXT & bindDescriptorBufferEmbeddedSamplersInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindDescriptorBufferEmbeddedSamplers2EXT &&
"Function <vkCmdBindDescriptorBufferEmbeddedSamplers2EXT> requires <VK_KHR_maintenance6>" );
getDispatcher()->vkCmdBindDescriptorBufferEmbeddedSamplers2EXT(
static_cast<VkCommandBuffer>( m_commandBuffer ),
reinterpret_cast<const VkBindDescriptorBufferEmbeddedSamplersInfoEXT *>( &bindDescriptorBufferEmbeddedSamplersInfo ) );
}
//=== VK_EXT_device_generated_commands ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 Device::getGeneratedCommandsMemoryRequirementsEXT(
const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoEXT & info ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetGeneratedCommandsMemoryRequirementsEXT &&
"Function <vkGetGeneratedCommandsMemoryRequirementsEXT> requires <VK_EXT_device_generated_commands>" );
VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
getDispatcher()->vkGetGeneratedCommandsMemoryRequirementsEXT( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoEXT *>( &info ),
reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
return memoryRequirements;
}
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> Device::getGeneratedCommandsMemoryRequirementsEXT(
const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoEXT & info ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetGeneratedCommandsMemoryRequirementsEXT &&
"Function <vkGetGeneratedCommandsMemoryRequirementsEXT> requires <VK_EXT_device_generated_commands>" );
VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
getDispatcher()->vkGetGeneratedCommandsMemoryRequirementsEXT( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoEXT *>( &info ),
reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
return structureChain;
}
VULKAN_HPP_INLINE void CommandBuffer::preprocessGeneratedCommandsEXT( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoEXT & generatedCommandsInfo,
VULKAN_HPP_NAMESPACE::CommandBuffer stateCommandBuffer ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdPreprocessGeneratedCommandsEXT &&
"Function <vkCmdPreprocessGeneratedCommandsEXT> requires <VK_EXT_device_generated_commands>" );
getDispatcher()->vkCmdPreprocessGeneratedCommandsEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
reinterpret_cast<const VkGeneratedCommandsInfoEXT *>( &generatedCommandsInfo ),
static_cast<VkCommandBuffer>( stateCommandBuffer ) );
}
VULKAN_HPP_INLINE void
CommandBuffer::executeGeneratedCommandsEXT( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed,
const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoEXT & generatedCommandsInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdExecuteGeneratedCommandsEXT &&
"Function <vkCmdExecuteGeneratedCommandsEXT> requires <VK_EXT_device_generated_commands>" );
getDispatcher()->vkCmdExecuteGeneratedCommandsEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkBool32>( isPreprocessed ),
reinterpret_cast<const VkGeneratedCommandsInfoEXT *>( &generatedCommandsInfo ) );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::IndirectCommandsLayoutEXT>::Type
Device::createIndirectCommandsLayoutEXT( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoEXT const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT indirectCommandsLayout;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateIndirectCommandsLayoutEXT(
static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoEXT *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkIndirectCommandsLayoutEXT *>( &indirectCommandsLayout ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
return VULKAN_HPP_UNEXPECTED( result );
# else
VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createIndirectCommandsLayoutEXT" );
# endif
}
return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::IndirectCommandsLayoutEXT(
*this, *reinterpret_cast<VkIndirectCommandsLayoutEXT *>( &indirectCommandsLayout ), allocator );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::IndirectExecutionSetEXT>::Type
Device::createIndirectExecutionSetEXT( VULKAN_HPP_NAMESPACE::IndirectExecutionSetCreateInfoEXT const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
VULKAN_HPP_RAII_CREATE_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateIndirectExecutionSetEXT(
static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkIndirectExecutionSetCreateInfoEXT *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkIndirectExecutionSetEXT *>( &indirectExecutionSet ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
return VULKAN_HPP_UNEXPECTED( result );
# else
VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createIndirectExecutionSetEXT" );
# endif
}
return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::IndirectExecutionSetEXT(
*this, *reinterpret_cast<VkIndirectExecutionSetEXT *>( &indirectExecutionSet ), allocator );
}
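    // Usage sketch (assumptions: the default `vk` / `vk::raii` namespace names, a vk::raii::Device named
    // `device` created with VK_EXT_device_generated_commands enabled, and an application-specific
    // create-info). The factories above follow the CreateReturnType pattern: with exceptions enabled they
    // return the RAII handle directly, with VULKAN_HPP_RAII_NO_EXCEPTIONS they return an expected-like
    // value that has to be checked before use:
    //
    //   vk::IndirectExecutionSetCreateInfoEXT createInfo = /* application specific */;
    //   auto created = device.createIndirectExecutionSetEXT( createInfo );
    // #if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
    //   if ( !created ) { /* inspect created.error() */ }
    //   vk::raii::IndirectExecutionSetEXT indirectExecutionSet = std::move( *created );
    // #else
    //   vk::raii::IndirectExecutionSetEXT indirectExecutionSet = std::move( created );
    // #endif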
VULKAN_HPP_INLINE void IndirectExecutionSetEXT::updatePipeline(
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteIndirectExecutionSetPipelineEXT> const & executionSetWrites ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkUpdateIndirectExecutionSetPipelineEXT &&
"Function <vkUpdateIndirectExecutionSetPipelineEXT> requires <VK_EXT_device_generated_commands>" );
getDispatcher()->vkUpdateIndirectExecutionSetPipelineEXT( static_cast<VkDevice>( m_device ),
static_cast<VkIndirectExecutionSetEXT>( m_indirectExecutionSet ),
executionSetWrites.size(),
reinterpret_cast<const VkWriteIndirectExecutionSetPipelineEXT *>( executionSetWrites.data() ) );
}
VULKAN_HPP_INLINE void IndirectExecutionSetEXT::updateShader(
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteIndirectExecutionSetShaderEXT> const & executionSetWrites ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkUpdateIndirectExecutionSetShaderEXT &&
"Function <vkUpdateIndirectExecutionSetShaderEXT> requires <VK_EXT_device_generated_commands>" );
getDispatcher()->vkUpdateIndirectExecutionSetShaderEXT( static_cast<VkDevice>( m_device ),
static_cast<VkIndirectExecutionSetEXT>( m_indirectExecutionSet ),
executionSetWrites.size(),
reinterpret_cast<const VkWriteIndirectExecutionSetShaderEXT *>( executionSetWrites.data() ) );
}
//=== VK_NV_cooperative_matrix2 ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixFlexibleDimensionsPropertiesNV>
PhysicalDevice::getCooperativeMatrixFlexibleDimensionsPropertiesNV() const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV &&
"Function <vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV> requires <VK_NV_cooperative_matrix2>" );
std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixFlexibleDimensionsPropertiesNV> properties;
uint32_t propertyCount;
VULKAN_HPP_NAMESPACE::Result result;
do
{
result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV(
static_cast<VkPhysicalDevice>( m_physicalDevice ), &propertyCount, nullptr ) );
if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount )
{
properties.resize( propertyCount );
result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV(
static_cast<VkPhysicalDevice>( m_physicalDevice ),
&propertyCount,
reinterpret_cast<VkCooperativeMatrixFlexibleDimensionsPropertiesNV *>( properties.data() ) ) );
}
} while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixFlexibleDimensionsPropertiesNV" );
VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
if ( propertyCount < properties.size() )
{
properties.resize( propertyCount );
}
return properties;
}
//====================
//=== RAII Helpers ===
//====================
template <typename RAIIType>
std::vector<typename RAIIType::CppType> filterCppTypes( std::vector<RAIIType> const & raiiTypes )
{
std::vector<typename RAIIType::CppType> cppTypes( raiiTypes.size() );
std::transform( raiiTypes.begin(), raiiTypes.end(), cppTypes.begin(), []( RAIIType const & d ) { return *d; } );
return cppTypes;
}
template <typename RAIIType, class UnaryPredicate>
std::vector<typename RAIIType::CppType> filterCppTypes( std::vector<RAIIType> const & raiiTypes, UnaryPredicate p )
{
std::vector<typename RAIIType::CppType> cppTypes;
for ( auto const & t : raiiTypes )
{
if ( p( t ) )
{
cppTypes.push_back( *t );
}
}
return cppTypes;
}
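    // Usage sketch (assumptions: the default `vk` / `vk::raii` namespace names and a populated
    // std::vector<vk::raii::ImageView> named `raiiViews`). These helpers extract the plain Vulkan-Hpp
    // handles from their RAII wrappers, e.g. to fill a framebuffer or descriptor create-info, optionally
    // keeping only the elements selected by a predicate on the RAII objects:
    //
    //   std::vector<vk::ImageView> views = vk::raii::filterCppTypes( raiiViews );
    //   std::vector<vk::ImageView> nonNullViews =
    //     vk::raii::filterCppTypes( raiiViews, []( vk::raii::ImageView const & v ) { return *v != vk::ImageView{}; } );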
} // namespace VULKAN_HPP_RAII_NAMESPACE
} // namespace VULKAN_HPP_NAMESPACE
#endif
#endif