diff --git a/src/video/khronos/vulkan/vk_icd.h b/src/video/khronos/vulkan/vk_icd.h
new file mode 100644
index 000000000..b935fa178
--- /dev/null
+++ b/src/video/khronos/vulkan/vk_icd.h
@@ -0,0 +1,170 @@
+//
+// File: vk_icd.h
+//
+/*
+ * Copyright (c) 2015-2016 The Khronos Group Inc.
+ * Copyright (c) 2015-2016 Valve Corporation
+ * Copyright (c) 2015-2016 LunarG, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+#ifndef VKICD_H
+#define VKICD_H
+
+#include "vulkan.h"
+#include <stdbool.h>
+
+// Loader-ICD version negotiation API. Versions add the following features:
+// Version 0 - Initial. Doesn't support vk_icdGetInstanceProcAddr
+//             or vk_icdNegotiateLoaderICDInterfaceVersion.
+// Version 1 - Add support for vk_icdGetInstanceProcAddr.
+// Version 2 - Add Loader/ICD Interface version negotiation
+//             via vk_icdNegotiateLoaderICDInterfaceVersion.
+// Version 3 - Add ICD creation/destruction of KHR_surface objects.
+// Version 4 - Add unknown physical device extension querying via
+//             vk_icdGetPhysicalDeviceProcAddr.
+// Version 5 - Tells ICDs that the loader is now paying attention to the
+//             application version of Vulkan passed into the ApplicationInfo
+//             structure during vkCreateInstance. This will tell the ICD
+//             that if the loader is older, it should automatically fail a
+//             call for any API version > 1.0. Otherwise, the loader will
+//             manually determine if it can support the expected version.
+#define CURRENT_LOADER_ICD_INTERFACE_VERSION 5
+#define MIN_SUPPORTED_LOADER_ICD_INTERFACE_VERSION 0
+#define MIN_PHYS_DEV_EXTENSION_ICD_INTERFACE_VERSION 4
+typedef VkResult(VKAPI_PTR *PFN_vkNegotiateLoaderICDInterfaceVersion)(uint32_t *pVersion);
+
+// This is defined in vk_layer.h which will be found by the loader, but if an ICD is building against this
+// file directly, it won't be found.
+#ifndef PFN_GetPhysicalDeviceProcAddr
+typedef PFN_vkVoidFunction(VKAPI_PTR *PFN_GetPhysicalDeviceProcAddr)(VkInstance instance, const char *pName);
+#endif
+
+/*
+ * The ICD must reserve space for a pointer for the loader's dispatch
+ * table, at the start of <each object created by the ICD>.
+ * The ICD must initialize this variable using the SET_LOADER_MAGIC_VALUE macro.
+ */
+
+#define ICD_LOADER_MAGIC 0x01CDC0DE
+
+typedef union {
+    uintptr_t loaderMagic;
+    void *loaderData;
+} VK_LOADER_DATA;
+
+static inline void set_loader_magic_value(void *pNewObject) {
+    VK_LOADER_DATA *loader_info = (VK_LOADER_DATA *)pNewObject;
+    loader_info->loaderMagic = ICD_LOADER_MAGIC;
+}
+
+static inline bool valid_loader_magic_value(void *pNewObject) {
+    const VK_LOADER_DATA *loader_info = (VK_LOADER_DATA *)pNewObject;
+    return (loader_info->loaderMagic & 0xffffffff) == ICD_LOADER_MAGIC;
+}
+
+/*
+ * Windows and Linux ICDs will treat VkSurfaceKHR as a pointer to a struct that
+ * contains the platform-specific connection and surface information.
+ */
+typedef enum {
+    VK_ICD_WSI_PLATFORM_MIR,
+    VK_ICD_WSI_PLATFORM_WAYLAND,
+    VK_ICD_WSI_PLATFORM_WIN32,
+    VK_ICD_WSI_PLATFORM_XCB,
+    VK_ICD_WSI_PLATFORM_XLIB,
+    VK_ICD_WSI_PLATFORM_ANDROID,
+    VK_ICD_WSI_PLATFORM_MACOS,
+    VK_ICD_WSI_PLATFORM_IOS,
+    VK_ICD_WSI_PLATFORM_DISPLAY
+} VkIcdWsiPlatform;
+
+typedef struct {
+    VkIcdWsiPlatform platform;
+} VkIcdSurfaceBase;
+
+#ifdef VK_USE_PLATFORM_MIR_KHR
+typedef struct {
+    VkIcdSurfaceBase base;
+    MirConnection *connection;
+    MirSurface *mirSurface;
+} VkIcdSurfaceMir;
+#endif // VK_USE_PLATFORM_MIR_KHR
+
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+typedef struct {
+    VkIcdSurfaceBase base;
+    struct wl_display *display;
+    struct wl_surface *surface;
+} VkIcdSurfaceWayland;
+#endif // VK_USE_PLATFORM_WAYLAND_KHR
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+typedef struct {
+    VkIcdSurfaceBase base;
+    HINSTANCE hinstance;
+    HWND hwnd;
+} VkIcdSurfaceWin32;
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+#ifdef VK_USE_PLATFORM_XCB_KHR
+typedef struct {
+    VkIcdSurfaceBase base;
+    xcb_connection_t *connection;
+    xcb_window_t window;
+} VkIcdSurfaceXcb;
+#endif // VK_USE_PLATFORM_XCB_KHR
+
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+typedef struct {
+    VkIcdSurfaceBase base;
+    Display *dpy;
+    Window window;
+} VkIcdSurfaceXlib;
+#endif // VK_USE_PLATFORM_XLIB_KHR
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+typedef struct {
+    VkIcdSurfaceBase base;
+    struct ANativeWindow *window;
+} VkIcdSurfaceAndroid;
+#endif // VK_USE_PLATFORM_ANDROID_KHR
+
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+typedef struct {
+    VkIcdSurfaceBase base;
+    const void *pView;
+} VkIcdSurfaceMacOS;
+#endif // VK_USE_PLATFORM_MACOS_MVK
+
+#ifdef VK_USE_PLATFORM_IOS_MVK
+typedef struct {
+    VkIcdSurfaceBase base;
+    const void *pView;
+} VkIcdSurfaceIOS;
+#endif // VK_USE_PLATFORM_IOS_MVK
+
+typedef struct {
+    VkIcdSurfaceBase base;
+    VkDisplayModeKHR displayMode;
+    uint32_t planeIndex;
+    uint32_t planeStackIndex;
+    VkSurfaceTransformFlagBitsKHR transform;
+    float globalAlpha;
+    VkDisplayPlaneAlphaFlagBitsKHR alphaMode;
+    VkExtent2D imageExtent;
+} VkIcdSurfaceDisplay;
+
+#endif // VKICD_H
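For reference, a minimal sketch (not part of the diff) of how a driver is expected to use the vk_icd.h declarations above: it exports vk_icdNegotiateLoaderICDInterfaceVersion to agree on an interface version with the loader, and stamps every dispatchable object it creates with the loader magic. The my_icd_* names are hypothetical.

#include "vk_icd.h"

/* Version negotiation: the loader writes the highest interface version it
 * supports into *pSupportedVersion; the ICD lowers it to what it implements,
 * or fails if the loader is too old for it. One reasonable shape, sketch only. */
VKAPI_ATTR VkResult VKAPI_CALL
vk_icdNegotiateLoaderICDInterfaceVersion(uint32_t *pSupportedVersion) {
    if (*pSupportedVersion < MIN_SUPPORTED_LOADER_ICD_INTERFACE_VERSION) {
        return VK_ERROR_INCOMPATIBLE_DRIVER;
    }
    if (*pSupportedVersion > CURRENT_LOADER_ICD_INTERFACE_VERSION) {
        *pSupportedVersion = CURRENT_LOADER_ICD_INTERFACE_VERSION;
    }
    return VK_SUCCESS;
}

/* Every dispatchable object the ICD returns to the loader must begin with
 * space for the loader's dispatch pointer, stamped with ICD_LOADER_MAGIC. */
struct my_icd_queue {               /* hypothetical driver object */
    VK_LOADER_DATA loader_data;     /* must be the first member */
    /* ... driver-private state ... */
};

static void my_icd_init_queue(struct my_icd_queue *queue) {
    set_loader_magic_value(queue);  /* loader later checks this */
    /* valid_loader_magic_value(queue) now returns true */
}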
diff --git a/src/video/khronos/vulkan/vk_layer.h b/src/video/khronos/vulkan/vk_layer.h
new file mode 100644
index 000000000..823c88ab7
--- /dev/null
+++ b/src/video/khronos/vulkan/vk_layer.h
@@ -0,0 +1,195 @@
+//
+// File: vk_layer.h
+//
+/*
+ * Copyright (c) 2015-2017 The Khronos Group Inc.
+ * Copyright (c) 2015-2017 Valve Corporation
+ * Copyright (c) 2015-2017 LunarG, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+/* Need to define dispatch table
+ * Core struct can then have ptr to dispatch table at the top
+ * Along with object ptrs for current and next OBJ
+ */
+#pragma once
+
+#include "vulkan.h"
+#if defined(__GNUC__) && __GNUC__ >= 4
+#define VK_LAYER_EXPORT __attribute__((visibility("default")))
+#elif defined(__SUNPRO_C) && (__SUNPRO_C >= 0x590)
+#define VK_LAYER_EXPORT __attribute__((visibility("default")))
+#else
+#define VK_LAYER_EXPORT
+#endif
+
+#define MAX_NUM_UNKNOWN_EXTS 250
+
+ // Loader-Layer version negotiation API. Versions add the following features:
+ // Versions 0/1 - Initial. Doesn't support vk_layerGetPhysicalDeviceProcAddr
+ //                or vk_icdNegotiateLoaderLayerInterfaceVersion.
+ // Version 2    - Add support for vk_layerGetPhysicalDeviceProcAddr and
+ //                vk_icdNegotiateLoaderLayerInterfaceVersion.
+#define CURRENT_LOADER_LAYER_INTERFACE_VERSION 2
+#define MIN_SUPPORTED_LOADER_LAYER_INTERFACE_VERSION 1
+
+#define VK_CURRENT_CHAIN_VERSION 1
+
+// Typedef for use in the interfaces below
+typedef PFN_vkVoidFunction (VKAPI_PTR *PFN_GetPhysicalDeviceProcAddr)(VkInstance instance, const char* pName);
+
+// Version negotiation values
+typedef enum VkNegotiateLayerStructType {
+    LAYER_NEGOTIATE_UNINTIALIZED = 0,
+    LAYER_NEGOTIATE_INTERFACE_STRUCT = 1,
+} VkNegotiateLayerStructType;
+
+// Version negotiation structures
+typedef struct VkNegotiateLayerInterface {
+    VkNegotiateLayerStructType sType;
+    void *pNext;
+    uint32_t loaderLayerInterfaceVersion;
+    PFN_vkGetInstanceProcAddr pfnGetInstanceProcAddr;
+    PFN_vkGetDeviceProcAddr pfnGetDeviceProcAddr;
+    PFN_GetPhysicalDeviceProcAddr pfnGetPhysicalDeviceProcAddr;
+} VkNegotiateLayerInterface;
+
+// Version negotiation functions
+typedef VkResult (VKAPI_PTR *PFN_vkNegotiateLoaderLayerInterfaceVersion)(VkNegotiateLayerInterface *pVersionStruct);
+
+// Function prototype for unknown physical device extension command
+typedef VkResult(VKAPI_PTR *PFN_PhysDevExt)(VkPhysicalDevice phys_device);
+
+// ------------------------------------------------------------------------------------------------
+// CreateInstance and CreateDevice support structures
+
+/* Sub type of structure for instance and device loader ext of CreateInfo.
+ * When sType == VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO
+ * or sType == VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO
+ * then VkLayerFunction indicates struct type pointed to by pNext
+ */
+typedef enum VkLayerFunction_ {
+    VK_LAYER_LINK_INFO = 0,
+    VK_LOADER_DATA_CALLBACK = 1
+} VkLayerFunction;
+
+typedef struct VkLayerInstanceLink_ {
+    struct VkLayerInstanceLink_ *pNext;
+    PFN_vkGetInstanceProcAddr pfnNextGetInstanceProcAddr;
+    PFN_GetPhysicalDeviceProcAddr pfnNextGetPhysicalDeviceProcAddr;
+} VkLayerInstanceLink;
+
+/*
+ * When creating the device chain the loader needs to pass
+ * down information about its device structure needed at
+ * the end of the chain. Passing the data via the
+ * VkLayerDeviceInfo avoids issues with finding the
+ * exact instance being used.
+ */
+typedef struct VkLayerDeviceInfo_ {
+    void *device_info;
+    PFN_vkGetInstanceProcAddr pfnNextGetInstanceProcAddr;
+} VkLayerDeviceInfo;
+
+typedef VkResult (VKAPI_PTR *PFN_vkSetInstanceLoaderData)(VkInstance instance,
+                                                          void *object);
+typedef VkResult (VKAPI_PTR *PFN_vkSetDeviceLoaderData)(VkDevice device,
+                                                        void *object);
+
+typedef struct {
+    VkStructureType sType; // VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO
+    const void *pNext;
+    VkLayerFunction function;
+    union {
+        VkLayerInstanceLink *pLayerInfo;
+        PFN_vkSetInstanceLoaderData pfnSetInstanceLoaderData;
+    } u;
+} VkLayerInstanceCreateInfo;
+
+typedef struct VkLayerDeviceLink_ {
+    struct VkLayerDeviceLink_ *pNext;
+    PFN_vkGetInstanceProcAddr pfnNextGetInstanceProcAddr;
+    PFN_vkGetDeviceProcAddr pfnNextGetDeviceProcAddr;
+} VkLayerDeviceLink;
+
+typedef struct {
+    VkStructureType sType; // VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO
+    const void *pNext;
+    VkLayerFunction function;
+    union {
+        VkLayerDeviceLink *pLayerInfo;
+        PFN_vkSetDeviceLoaderData pfnSetDeviceLoaderData;
+    } u;
+} VkLayerDeviceCreateInfo;
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+VKAPI_ATTR VkResult VKAPI_CALL vkNegotiateLoaderLayerInterfaceVersion(VkNegotiateLayerInterface *pVersionStruct);
+
+typedef enum VkChainType {
+    VK_CHAIN_TYPE_UNKNOWN = 0,
+    VK_CHAIN_TYPE_ENUMERATE_INSTANCE_EXTENSION_PROPERTIES = 1,
+    VK_CHAIN_TYPE_ENUMERATE_INSTANCE_LAYER_PROPERTIES = 2,
+    VK_CHAIN_TYPE_ENUMERATE_INSTANCE_VERSION = 3,
+} VkChainType;
+
+typedef struct VkChainHeader {
+    VkChainType type;
+    uint32_t version;
+    uint32_t size;
+} VkChainHeader;
+
+typedef struct VkEnumerateInstanceExtensionPropertiesChain {
+    VkChainHeader header;
+    VkResult(VKAPI_PTR *pfnNextLayer)(const struct VkEnumerateInstanceExtensionPropertiesChain *, const char *, uint32_t *,
+                                      VkExtensionProperties *);
+    const struct VkEnumerateInstanceExtensionPropertiesChain *pNextLink;
+
+#if defined(__cplusplus)
+    inline VkResult CallDown(const char *pLayerName, uint32_t *pPropertyCount, VkExtensionProperties *pProperties) const {
+        return pfnNextLayer(pNextLink, pLayerName, pPropertyCount, pProperties);
+    }
+#endif
+} VkEnumerateInstanceExtensionPropertiesChain;
+
+typedef struct VkEnumerateInstanceLayerPropertiesChain {
+    VkChainHeader header;
+    VkResult(VKAPI_PTR *pfnNextLayer)(const struct VkEnumerateInstanceLayerPropertiesChain *, uint32_t *, VkLayerProperties *);
+    const struct VkEnumerateInstanceLayerPropertiesChain *pNextLink;
+
+#if defined(__cplusplus)
+    inline VkResult CallDown(uint32_t *pPropertyCount, VkLayerProperties *pProperties) const {
+        return pfnNextLayer(pNextLink, pPropertyCount, pProperties);
+    }
+#endif
+} VkEnumerateInstanceLayerPropertiesChain;
+
+typedef struct VkEnumerateInstanceVersionChain {
+    VkChainHeader header;
+    VkResult(VKAPI_PTR *pfnNextLayer)(const struct VkEnumerateInstanceVersionChain *, uint32_t *);
+    const struct VkEnumerateInstanceVersionChain *pNextLink;
+
+#if defined(__cplusplus)
+    inline VkResult CallDown(uint32_t *pApiVersion) const {
+        return pfnNextLayer(pNextLink, pApiVersion);
+    }
+#endif
+} VkEnumerateInstanceVersionChain;
+
+#ifdef __cplusplus
+}
+#endif
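Again purely illustrative (not part of the diff): the two places a layer typically touches the vk_layer.h declarations above are the loader's version-negotiation call and the VkLayerInstanceCreateInfo chain in its vkCreateInstance hook. The my_layer_* identifiers are hypothetical and only declared here to keep the sketch short.

#include "vk_layer.h"

/* Hypothetical layer entry points (declarations only, for the sketch). */
static VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL my_layer_GetInstanceProcAddr(VkInstance instance, const char *pName);
static VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL my_layer_GetDeviceProcAddr(VkDevice device, const char *pName);

/* 1. Version negotiation: the loader reports the interface version it
 *    supports; the layer lowers it if necessary and hands back its entry points. */
VKAPI_ATTR VkResult VKAPI_CALL
vkNegotiateLoaderLayerInterfaceVersion(VkNegotiateLayerInterface *pVersionStruct) {
    if (pVersionStruct == NULL || pVersionStruct->sType != LAYER_NEGOTIATE_INTERFACE_STRUCT) {
        return VK_ERROR_INITIALIZATION_FAILED;
    }
    if (pVersionStruct->loaderLayerInterfaceVersion > CURRENT_LOADER_LAYER_INTERFACE_VERSION) {
        pVersionStruct->loaderLayerInterfaceVersion = CURRENT_LOADER_LAYER_INTERFACE_VERSION;
    }
    pVersionStruct->pfnGetInstanceProcAddr = my_layer_GetInstanceProcAddr;
    pVersionStruct->pfnGetDeviceProcAddr = my_layer_GetDeviceProcAddr;
    pVersionStruct->pfnGetPhysicalDeviceProcAddr = NULL;   /* optional, interface version >= 2 */
    return VK_SUCCESS;
}

/* 2. Chaining vkCreateInstance: locate the VK_LAYER_LINK_INFO element the
 *    loader placed in the pNext chain, grab the next vkGetInstanceProcAddr,
 *    advance the link, then call down the chain. */
static VKAPI_ATTR VkResult VKAPI_CALL
my_layer_CreateInstance(const VkInstanceCreateInfo *pCreateInfo,
                        const VkAllocationCallbacks *pAllocator,
                        VkInstance *pInstance) {
    VkLayerInstanceCreateInfo *chain_info = (VkLayerInstanceCreateInfo *)pCreateInfo->pNext;
    while (chain_info != NULL &&
           !(chain_info->sType == VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO &&
             chain_info->function == VK_LAYER_LINK_INFO)) {
        chain_info = (VkLayerInstanceCreateInfo *)chain_info->pNext;
    }
    if (chain_info == NULL) {
        return VK_ERROR_INITIALIZATION_FAILED;
    }

    PFN_vkGetInstanceProcAddr next_gipa = chain_info->u.pLayerInfo->pfnNextGetInstanceProcAddr;
    PFN_vkCreateInstance next_create_instance =
        (PFN_vkCreateInstance)next_gipa(NULL, "vkCreateInstance");

    chain_info->u.pLayerInfo = chain_info->u.pLayerInfo->pNext;   /* advance for the layer below */
    return next_create_instance(pCreateInfo, pAllocator, pInstance);
}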
diff --git a/src/video/khronos/vulkan/vk_platform.h b/src/video/khronos/vulkan/vk_platform.h
index 72f80493c..728929924 100644
--- a/src/video/khronos/vulkan/vk_platform.h
+++ b/src/video/khronos/vulkan/vk_platform.h
@@ -89,32 +89,4 @@ extern "C"
 } // extern "C"
 #endif // __cplusplus
 
-// Platform-specific headers required by platform window system extensions.
-// These are enabled prior to #including "vulkan.h". The same enable then
-// controls inclusion of the extension interfaces in vulkan.h.
-
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
-#include <android/native_window.h>
-#endif
-
-#ifdef VK_USE_PLATFORM_MIR_KHR
-#include <mir_toolkit/client_types.h>
-#endif
-
-#ifdef VK_USE_PLATFORM_WAYLAND_KHR
-#include <wayland-client.h>
-#endif
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-#include <windows.h>
-#endif
-
-#ifdef VK_USE_PLATFORM_XLIB_KHR
-#include <X11/Xlib.h>
-#endif
-
-#ifdef VK_USE_PLATFORM_XCB_KHR
-#include <xcb/xcb.h>
-#endif
-
 #endif
diff --git a/src/video/khronos/vulkan/vk_sdk_platform.h b/src/video/khronos/vulkan/vk_sdk_platform.h
new file mode 100644
index 000000000..96d867694
--- /dev/null
+++ b/src/video/khronos/vulkan/vk_sdk_platform.h
@@ -0,0 +1,69 @@
+//
+// File: vk_sdk_platform.h
+//
+/*
+ * Copyright (c) 2015-2016 The Khronos Group Inc.
+ * Copyright (c) 2015-2016 Valve Corporation
+ * Copyright (c) 2015-2016 LunarG, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef VK_SDK_PLATFORM_H
+#define VK_SDK_PLATFORM_H
+
+#if defined(_WIN32)
+#define NOMINMAX
+#ifndef __cplusplus
+#undef inline
+#define inline __inline
+#endif // __cplusplus
+
+#if (defined(_MSC_VER) && _MSC_VER < 1900 /*vs2015*/)
+// C99:
+// Microsoft didn't implement C99 in Visual Studio; but started adding it with
+// VS2013. However, VS2013 still didn't have snprintf(). The following is a
+// work-around (Note: The _CRT_SECURE_NO_WARNINGS macro must be set in the
+// "CMakeLists.txt" file).
+// NOTE: This is fixed in Visual Studio 2015.
+#define snprintf _snprintf
+#endif
+
+#define strdup _strdup
+
+#endif // _WIN32
+
+// Check for noexcept support using clang, with fallback to Windows or GCC version numbers
+#ifndef NOEXCEPT
+#if defined(__clang__)
+#if __has_feature(cxx_noexcept)
+#define HAS_NOEXCEPT
+#endif
+#else
+#if defined(__GXX_EXPERIMENTAL_CXX0X__) && __GNUC__ * 10 + __GNUC_MINOR__ >= 46
+#define HAS_NOEXCEPT
+#else
+#if defined(_MSC_FULL_VER) && _MSC_FULL_VER >= 190023026 && defined(_HAS_EXCEPTIONS) && _HAS_EXCEPTIONS
+#define HAS_NOEXCEPT
+#endif
+#endif
+#endif
+
+#ifdef HAS_NOEXCEPT
+#define NOEXCEPT noexcept
+#else
+#define NOEXCEPT
+#endif
+#endif
+
+#endif // VK_SDK_PLATFORM_H
diff --git a/src/video/khronos/vulkan/vulkan.h b/src/video/khronos/vulkan/vulkan.h
index 04495fa0c..77da63783 100644
--- a/src/video/khronos/vulkan/vulkan.h
+++ b/src/video/khronos/vulkan/vulkan.h
@@ -1,12 +1,8 @@
 #ifndef VULKAN_H_
 #define VULKAN_H_ 1
 
-#ifdef __cplusplus
-extern "C" {
-#endif
-
 /*
-** Copyright (c) 2015-2017 The Khronos Group Inc.
+** Copyright (c) 2015-2018 The Khronos Group Inc.
 **
 ** Licensed under the Apache License, Version 2.0 (the "License");
 ** you may not use this file except in compliance with the License.
 ** You may obtain a copy of the License at
 **
 **     http://www.apache.org/licenses/LICENSE-2.0
 **
 ** Unless required by applicable law or agreed to in writing, software
 ** distributed under the License is distributed on an "AS IS" BASIS,
 ** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 ** See the License for the specific language governing permissions and
 ** limitations under the License.
 */
@@ -21,6438 +17,61 @@ extern "C" {
-/*
-** This header is generated from the Khronos Vulkan XML API Registry.
-** -*/ - - -#define VK_VERSION_1_0 1 -#include "./vk_platform.h" - -#define VK_MAKE_VERSION(major, minor, patch) \ - (((major) << 22) | ((minor) << 12) | (patch)) - -// DEPRECATED: This define has been removed. Specific version defines (e.g. VK_API_VERSION_1_0), or the VK_MAKE_VERSION macro, should be used instead. -//#define VK_API_VERSION VK_MAKE_VERSION(1, 0, 0) // Patch version should always be set to 0 - -// Vulkan 1.0 version number -#define VK_API_VERSION_1_0 VK_MAKE_VERSION(1, 0, 0)// Patch version should always be set to 0 - -#define VK_VERSION_MAJOR(version) ((uint32_t)(version) >> 22) -#define VK_VERSION_MINOR(version) (((uint32_t)(version) >> 12) & 0x3ff) -#define VK_VERSION_PATCH(version) ((uint32_t)(version) & 0xfff) -// Version of this file -#define VK_HEADER_VERSION 59 - - -#define VK_NULL_HANDLE 0 - - - -#define VK_DEFINE_HANDLE(object) typedef struct object##_T* object; - - -#if !defined(VK_DEFINE_NON_DISPATCHABLE_HANDLE) -#if defined(__LP64__) || defined(_WIN64) || (defined(__x86_64__) && !defined(__ILP32__) ) || defined(_M_X64) || defined(__ia64) || defined (_M_IA64) || defined(__aarch64__) || defined(__powerpc64__) - #define VK_DEFINE_NON_DISPATCHABLE_HANDLE(object) typedef struct object##_T *object; -#else - #define VK_DEFINE_NON_DISPATCHABLE_HANDLE(object) typedef uint64_t object; -#endif -#endif - - - -typedef uint32_t VkFlags; -typedef uint32_t VkBool32; -typedef uint64_t VkDeviceSize; -typedef uint32_t VkSampleMask; - -VK_DEFINE_HANDLE(VkInstance) -VK_DEFINE_HANDLE(VkPhysicalDevice) -VK_DEFINE_HANDLE(VkDevice) -VK_DEFINE_HANDLE(VkQueue) -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSemaphore) -VK_DEFINE_HANDLE(VkCommandBuffer) -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkFence) -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDeviceMemory) -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkBuffer) -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkImage) -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkEvent) -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkQueryPool) -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkBufferView) -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkImageView) -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkShaderModule) -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPipelineCache) -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPipelineLayout) -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkRenderPass) -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPipeline) -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDescriptorSetLayout) -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSampler) -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDescriptorPool) -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDescriptorSet) -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkFramebuffer) -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkCommandPool) - -#define VK_LOD_CLAMP_NONE 1000.0f -#define VK_REMAINING_MIP_LEVELS (~0U) -#define VK_REMAINING_ARRAY_LAYERS (~0U) -#define VK_WHOLE_SIZE (~0ULL) -#define VK_ATTACHMENT_UNUSED (~0U) -#define VK_TRUE 1 -#define VK_FALSE 0 -#define VK_QUEUE_FAMILY_IGNORED (~0U) -#define VK_SUBPASS_EXTERNAL (~0U) -#define VK_MAX_PHYSICAL_DEVICE_NAME_SIZE 256 -#define VK_UUID_SIZE 16 -#define VK_MAX_MEMORY_TYPES 32 -#define VK_MAX_MEMORY_HEAPS 16 -#define VK_MAX_EXTENSION_NAME_SIZE 256 -#define VK_MAX_DESCRIPTION_SIZE 256 - - -typedef enum VkPipelineCacheHeaderVersion { - VK_PIPELINE_CACHE_HEADER_VERSION_ONE = 1, - VK_PIPELINE_CACHE_HEADER_VERSION_BEGIN_RANGE = VK_PIPELINE_CACHE_HEADER_VERSION_ONE, - VK_PIPELINE_CACHE_HEADER_VERSION_END_RANGE = VK_PIPELINE_CACHE_HEADER_VERSION_ONE, - VK_PIPELINE_CACHE_HEADER_VERSION_RANGE_SIZE = (VK_PIPELINE_CACHE_HEADER_VERSION_ONE - VK_PIPELINE_CACHE_HEADER_VERSION_ONE + 1), - 
VK_PIPELINE_CACHE_HEADER_VERSION_MAX_ENUM = 0x7FFFFFFF -} VkPipelineCacheHeaderVersion; - -typedef enum VkResult { - VK_SUCCESS = 0, - VK_NOT_READY = 1, - VK_TIMEOUT = 2, - VK_EVENT_SET = 3, - VK_EVENT_RESET = 4, - VK_INCOMPLETE = 5, - VK_ERROR_OUT_OF_HOST_MEMORY = -1, - VK_ERROR_OUT_OF_DEVICE_MEMORY = -2, - VK_ERROR_INITIALIZATION_FAILED = -3, - VK_ERROR_DEVICE_LOST = -4, - VK_ERROR_MEMORY_MAP_FAILED = -5, - VK_ERROR_LAYER_NOT_PRESENT = -6, - VK_ERROR_EXTENSION_NOT_PRESENT = -7, - VK_ERROR_FEATURE_NOT_PRESENT = -8, - VK_ERROR_INCOMPATIBLE_DRIVER = -9, - VK_ERROR_TOO_MANY_OBJECTS = -10, - VK_ERROR_FORMAT_NOT_SUPPORTED = -11, - VK_ERROR_FRAGMENTED_POOL = -12, - VK_ERROR_SURFACE_LOST_KHR = -1000000000, - VK_ERROR_NATIVE_WINDOW_IN_USE_KHR = -1000000001, - VK_SUBOPTIMAL_KHR = 1000001003, - VK_ERROR_OUT_OF_DATE_KHR = -1000001004, - VK_ERROR_INCOMPATIBLE_DISPLAY_KHR = -1000003001, - VK_ERROR_VALIDATION_FAILED_EXT = -1000011001, - VK_ERROR_INVALID_SHADER_NV = -1000012000, - VK_ERROR_OUT_OF_POOL_MEMORY_KHR = -1000069000, - VK_ERROR_INVALID_EXTERNAL_HANDLE_KHR = -1000072003, - VK_RESULT_BEGIN_RANGE = VK_ERROR_FRAGMENTED_POOL, - VK_RESULT_END_RANGE = VK_INCOMPLETE, - VK_RESULT_RANGE_SIZE = (VK_INCOMPLETE - VK_ERROR_FRAGMENTED_POOL + 1), - VK_RESULT_MAX_ENUM = 0x7FFFFFFF -} VkResult; - -typedef enum VkStructureType { - VK_STRUCTURE_TYPE_APPLICATION_INFO = 0, - VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO = 1, - VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO = 2, - VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO = 3, - VK_STRUCTURE_TYPE_SUBMIT_INFO = 4, - VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO = 5, - VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE = 6, - VK_STRUCTURE_TYPE_BIND_SPARSE_INFO = 7, - VK_STRUCTURE_TYPE_FENCE_CREATE_INFO = 8, - VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO = 9, - VK_STRUCTURE_TYPE_EVENT_CREATE_INFO = 10, - VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO = 11, - VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO = 12, - VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO = 13, - VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO = 14, - VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO = 15, - VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO = 16, - VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO = 17, - VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO = 18, - VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO = 19, - VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO = 20, - VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO = 21, - VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO = 22, - VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO = 23, - VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO = 24, - VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO = 25, - VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO = 26, - VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO = 27, - VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO = 28, - VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO = 29, - VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO = 30, - VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO = 31, - VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO = 32, - VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO = 33, - VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO = 34, - VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET = 35, - VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET = 36, - VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO = 37, - VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO = 38, - VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO = 39, - VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO = 40, - VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO = 41, - 
VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO = 42, - VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO = 43, - VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER = 44, - VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER = 45, - VK_STRUCTURE_TYPE_MEMORY_BARRIER = 46, - VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO = 47, - VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO = 48, - VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR = 1000001000, - VK_STRUCTURE_TYPE_PRESENT_INFO_KHR = 1000001001, - VK_STRUCTURE_TYPE_DISPLAY_MODE_CREATE_INFO_KHR = 1000002000, - VK_STRUCTURE_TYPE_DISPLAY_SURFACE_CREATE_INFO_KHR = 1000002001, - VK_STRUCTURE_TYPE_DISPLAY_PRESENT_INFO_KHR = 1000003000, - VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR = 1000004000, - VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR = 1000005000, - VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR = 1000006000, - VK_STRUCTURE_TYPE_MIR_SURFACE_CREATE_INFO_KHR = 1000007000, - VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR = 1000008000, - VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR = 1000009000, - VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT = 1000011000, - VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_RASTERIZATION_ORDER_AMD = 1000018000, - VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_NAME_INFO_EXT = 1000022000, - VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_TAG_INFO_EXT = 1000022001, - VK_STRUCTURE_TYPE_DEBUG_MARKER_MARKER_INFO_EXT = 1000022002, - VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_IMAGE_CREATE_INFO_NV = 1000026000, - VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV = 1000026001, - VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV = 1000026002, - VK_STRUCTURE_TYPE_TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD = 1000041000, - VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO_KHX = 1000053000, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES_KHX = 1000053001, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES_KHX = 1000053002, - VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_NV = 1000056000, - VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_NV = 1000056001, - VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_NV = 1000057000, - VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_NV = 1000057001, - VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_NV = 1000058000, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2_KHR = 1000059000, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2_KHR = 1000059001, - VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2_KHR = 1000059002, - VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2_KHR = 1000059003, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2_KHR = 1000059004, - VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2_KHR = 1000059005, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2_KHR = 1000059006, - VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2_KHR = 1000059007, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2_KHR = 1000059008, - VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHX = 1000060000, - VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO_KHX = 1000060001, - VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHX = 1000060002, - VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO_KHX = 1000060003, - VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO_KHX = 1000060004, - VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO_KHX = 1000060005, - VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO_KHX = 1000060006, - VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_CAPABILITIES_KHX = 1000060007, - VK_STRUCTURE_TYPE_IMAGE_SWAPCHAIN_CREATE_INFO_KHX = 1000060008, - VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHX = 1000060009, 
- VK_STRUCTURE_TYPE_ACQUIRE_NEXT_IMAGE_INFO_KHX = 1000060010, - VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_INFO_KHX = 1000060011, - VK_STRUCTURE_TYPE_DEVICE_GROUP_SWAPCHAIN_CREATE_INFO_KHX = 1000060012, - VK_STRUCTURE_TYPE_VALIDATION_FLAGS_EXT = 1000061000, - VK_STRUCTURE_TYPE_VI_SURFACE_CREATE_INFO_NN = 1000062000, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES_KHX = 1000070000, - VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO_KHX = 1000070001, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO_KHR = 1000071000, - VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES_KHR = 1000071001, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO_KHR = 1000071002, - VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES_KHR = 1000071003, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES_KHR = 1000071004, - VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO_KHR = 1000072000, - VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_KHR = 1000072001, - VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_KHR = 1000072002, - VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR = 1000073000, - VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_KHR = 1000073001, - VK_STRUCTURE_TYPE_MEMORY_WIN32_HANDLE_PROPERTIES_KHR = 1000073002, - VK_STRUCTURE_TYPE_MEMORY_GET_WIN32_HANDLE_INFO_KHR = 1000073003, - VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR = 1000074000, - VK_STRUCTURE_TYPE_MEMORY_FD_PROPERTIES_KHR = 1000074001, - VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR = 1000074002, - VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_KHR = 1000075000, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO_KHR = 1000076000, - VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES_KHR = 1000076001, - VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO_KHR = 1000077000, - VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR = 1000078000, - VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR = 1000078001, - VK_STRUCTURE_TYPE_D3D12_FENCE_SUBMIT_INFO_KHR = 1000078002, - VK_STRUCTURE_TYPE_SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR = 1000078003, - VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR = 1000079000, - VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR = 1000079001, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR = 1000080000, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES_KHR = 1000083000, - VK_STRUCTURE_TYPE_PRESENT_REGIONS_KHR = 1000084000, - VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO_KHR = 1000085000, - VK_STRUCTURE_TYPE_OBJECT_TABLE_CREATE_INFO_NVX = 1000086000, - VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NVX = 1000086001, - VK_STRUCTURE_TYPE_CMD_PROCESS_COMMANDS_INFO_NVX = 1000086002, - VK_STRUCTURE_TYPE_CMD_RESERVE_SPACE_FOR_COMMANDS_INFO_NVX = 1000086003, - VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_LIMITS_NVX = 1000086004, - VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_FEATURES_NVX = 1000086005, - VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_W_SCALING_STATE_CREATE_INFO_NV = 1000087000, - VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_EXT = 1000090000, - VK_STRUCTURE_TYPE_DISPLAY_POWER_INFO_EXT = 1000091000, - VK_STRUCTURE_TYPE_DEVICE_EVENT_INFO_EXT = 1000091001, - VK_STRUCTURE_TYPE_DISPLAY_EVENT_INFO_EXT = 1000091002, - VK_STRUCTURE_TYPE_SWAPCHAIN_COUNTER_CREATE_INFO_EXT = 1000091003, - VK_STRUCTURE_TYPE_PRESENT_TIMES_INFO_GOOGLE = 1000092000, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX = 1000097000, - VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV = 1000098000, - 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISCARD_RECTANGLE_PROPERTIES_EXT = 1000099000, - VK_STRUCTURE_TYPE_PIPELINE_DISCARD_RECTANGLE_STATE_CREATE_INFO_EXT = 1000099001, - VK_STRUCTURE_TYPE_HDR_METADATA_EXT = 1000105000, - VK_STRUCTURE_TYPE_SHARED_PRESENT_SURFACE_CAPABILITIES_KHR = 1000111000, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO_KHR = 1000112000, - VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES_KHR = 1000112001, - VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO_KHR = 1000113000, - VK_STRUCTURE_TYPE_IMPORT_FENCE_WIN32_HANDLE_INFO_KHR = 1000114000, - VK_STRUCTURE_TYPE_EXPORT_FENCE_WIN32_HANDLE_INFO_KHR = 1000114001, - VK_STRUCTURE_TYPE_FENCE_GET_WIN32_HANDLE_INFO_KHR = 1000114002, - VK_STRUCTURE_TYPE_IMPORT_FENCE_FD_INFO_KHR = 1000115000, - VK_STRUCTURE_TYPE_FENCE_GET_FD_INFO_KHR = 1000115001, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR = 1000119000, - VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_KHR = 1000119001, - VK_STRUCTURE_TYPE_SURFACE_FORMAT_2_KHR = 1000119002, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES_KHR = 1000120000, - VK_STRUCTURE_TYPE_IOS_SURFACE_CREATE_INFO_MVK = 1000122000, - VK_STRUCTURE_TYPE_MACOS_SURFACE_CREATE_INFO_MVK = 1000123000, - VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR = 1000127000, - VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR = 1000127001, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES_EXT = 1000130000, - VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO_EXT = 1000130001, - VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR = 1000146000, - VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR = 1000146001, - VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2_KHR = 1000146002, - VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR = 1000146003, - VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2_KHR = 1000146004, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT = 1000148000, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT = 1000148001, - VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_ADVANCED_STATE_CREATE_INFO_EXT = 1000148002, - VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_TO_COLOR_STATE_CREATE_INFO_NV = 1000149000, - VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV = 1000152000, - VK_STRUCTURE_TYPE_BEGIN_RANGE = VK_STRUCTURE_TYPE_APPLICATION_INFO, - VK_STRUCTURE_TYPE_END_RANGE = VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO, - VK_STRUCTURE_TYPE_RANGE_SIZE = (VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO - VK_STRUCTURE_TYPE_APPLICATION_INFO + 1), - VK_STRUCTURE_TYPE_MAX_ENUM = 0x7FFFFFFF -} VkStructureType; - -typedef enum VkSystemAllocationScope { - VK_SYSTEM_ALLOCATION_SCOPE_COMMAND = 0, - VK_SYSTEM_ALLOCATION_SCOPE_OBJECT = 1, - VK_SYSTEM_ALLOCATION_SCOPE_CACHE = 2, - VK_SYSTEM_ALLOCATION_SCOPE_DEVICE = 3, - VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE = 4, - VK_SYSTEM_ALLOCATION_SCOPE_BEGIN_RANGE = VK_SYSTEM_ALLOCATION_SCOPE_COMMAND, - VK_SYSTEM_ALLOCATION_SCOPE_END_RANGE = VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE, - VK_SYSTEM_ALLOCATION_SCOPE_RANGE_SIZE = (VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE - VK_SYSTEM_ALLOCATION_SCOPE_COMMAND + 1), - VK_SYSTEM_ALLOCATION_SCOPE_MAX_ENUM = 0x7FFFFFFF -} VkSystemAllocationScope; - -typedef enum VkInternalAllocationType { - VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE = 0, - VK_INTERNAL_ALLOCATION_TYPE_BEGIN_RANGE = VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE, - VK_INTERNAL_ALLOCATION_TYPE_END_RANGE = VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE, - VK_INTERNAL_ALLOCATION_TYPE_RANGE_SIZE = 
(VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE - VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE + 1), - VK_INTERNAL_ALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF -} VkInternalAllocationType; - -typedef enum VkFormat { - VK_FORMAT_UNDEFINED = 0, - VK_FORMAT_R4G4_UNORM_PACK8 = 1, - VK_FORMAT_R4G4B4A4_UNORM_PACK16 = 2, - VK_FORMAT_B4G4R4A4_UNORM_PACK16 = 3, - VK_FORMAT_R5G6B5_UNORM_PACK16 = 4, - VK_FORMAT_B5G6R5_UNORM_PACK16 = 5, - VK_FORMAT_R5G5B5A1_UNORM_PACK16 = 6, - VK_FORMAT_B5G5R5A1_UNORM_PACK16 = 7, - VK_FORMAT_A1R5G5B5_UNORM_PACK16 = 8, - VK_FORMAT_R8_UNORM = 9, - VK_FORMAT_R8_SNORM = 10, - VK_FORMAT_R8_USCALED = 11, - VK_FORMAT_R8_SSCALED = 12, - VK_FORMAT_R8_UINT = 13, - VK_FORMAT_R8_SINT = 14, - VK_FORMAT_R8_SRGB = 15, - VK_FORMAT_R8G8_UNORM = 16, - VK_FORMAT_R8G8_SNORM = 17, - VK_FORMAT_R8G8_USCALED = 18, - VK_FORMAT_R8G8_SSCALED = 19, - VK_FORMAT_R8G8_UINT = 20, - VK_FORMAT_R8G8_SINT = 21, - VK_FORMAT_R8G8_SRGB = 22, - VK_FORMAT_R8G8B8_UNORM = 23, - VK_FORMAT_R8G8B8_SNORM = 24, - VK_FORMAT_R8G8B8_USCALED = 25, - VK_FORMAT_R8G8B8_SSCALED = 26, - VK_FORMAT_R8G8B8_UINT = 27, - VK_FORMAT_R8G8B8_SINT = 28, - VK_FORMAT_R8G8B8_SRGB = 29, - VK_FORMAT_B8G8R8_UNORM = 30, - VK_FORMAT_B8G8R8_SNORM = 31, - VK_FORMAT_B8G8R8_USCALED = 32, - VK_FORMAT_B8G8R8_SSCALED = 33, - VK_FORMAT_B8G8R8_UINT = 34, - VK_FORMAT_B8G8R8_SINT = 35, - VK_FORMAT_B8G8R8_SRGB = 36, - VK_FORMAT_R8G8B8A8_UNORM = 37, - VK_FORMAT_R8G8B8A8_SNORM = 38, - VK_FORMAT_R8G8B8A8_USCALED = 39, - VK_FORMAT_R8G8B8A8_SSCALED = 40, - VK_FORMAT_R8G8B8A8_UINT = 41, - VK_FORMAT_R8G8B8A8_SINT = 42, - VK_FORMAT_R8G8B8A8_SRGB = 43, - VK_FORMAT_B8G8R8A8_UNORM = 44, - VK_FORMAT_B8G8R8A8_SNORM = 45, - VK_FORMAT_B8G8R8A8_USCALED = 46, - VK_FORMAT_B8G8R8A8_SSCALED = 47, - VK_FORMAT_B8G8R8A8_UINT = 48, - VK_FORMAT_B8G8R8A8_SINT = 49, - VK_FORMAT_B8G8R8A8_SRGB = 50, - VK_FORMAT_A8B8G8R8_UNORM_PACK32 = 51, - VK_FORMAT_A8B8G8R8_SNORM_PACK32 = 52, - VK_FORMAT_A8B8G8R8_USCALED_PACK32 = 53, - VK_FORMAT_A8B8G8R8_SSCALED_PACK32 = 54, - VK_FORMAT_A8B8G8R8_UINT_PACK32 = 55, - VK_FORMAT_A8B8G8R8_SINT_PACK32 = 56, - VK_FORMAT_A8B8G8R8_SRGB_PACK32 = 57, - VK_FORMAT_A2R10G10B10_UNORM_PACK32 = 58, - VK_FORMAT_A2R10G10B10_SNORM_PACK32 = 59, - VK_FORMAT_A2R10G10B10_USCALED_PACK32 = 60, - VK_FORMAT_A2R10G10B10_SSCALED_PACK32 = 61, - VK_FORMAT_A2R10G10B10_UINT_PACK32 = 62, - VK_FORMAT_A2R10G10B10_SINT_PACK32 = 63, - VK_FORMAT_A2B10G10R10_UNORM_PACK32 = 64, - VK_FORMAT_A2B10G10R10_SNORM_PACK32 = 65, - VK_FORMAT_A2B10G10R10_USCALED_PACK32 = 66, - VK_FORMAT_A2B10G10R10_SSCALED_PACK32 = 67, - VK_FORMAT_A2B10G10R10_UINT_PACK32 = 68, - VK_FORMAT_A2B10G10R10_SINT_PACK32 = 69, - VK_FORMAT_R16_UNORM = 70, - VK_FORMAT_R16_SNORM = 71, - VK_FORMAT_R16_USCALED = 72, - VK_FORMAT_R16_SSCALED = 73, - VK_FORMAT_R16_UINT = 74, - VK_FORMAT_R16_SINT = 75, - VK_FORMAT_R16_SFLOAT = 76, - VK_FORMAT_R16G16_UNORM = 77, - VK_FORMAT_R16G16_SNORM = 78, - VK_FORMAT_R16G16_USCALED = 79, - VK_FORMAT_R16G16_SSCALED = 80, - VK_FORMAT_R16G16_UINT = 81, - VK_FORMAT_R16G16_SINT = 82, - VK_FORMAT_R16G16_SFLOAT = 83, - VK_FORMAT_R16G16B16_UNORM = 84, - VK_FORMAT_R16G16B16_SNORM = 85, - VK_FORMAT_R16G16B16_USCALED = 86, - VK_FORMAT_R16G16B16_SSCALED = 87, - VK_FORMAT_R16G16B16_UINT = 88, - VK_FORMAT_R16G16B16_SINT = 89, - VK_FORMAT_R16G16B16_SFLOAT = 90, - VK_FORMAT_R16G16B16A16_UNORM = 91, - VK_FORMAT_R16G16B16A16_SNORM = 92, - VK_FORMAT_R16G16B16A16_USCALED = 93, - VK_FORMAT_R16G16B16A16_SSCALED = 94, - VK_FORMAT_R16G16B16A16_UINT = 95, - VK_FORMAT_R16G16B16A16_SINT = 96, - VK_FORMAT_R16G16B16A16_SFLOAT = 97, - 
VK_FORMAT_R32_UINT = 98, - VK_FORMAT_R32_SINT = 99, - VK_FORMAT_R32_SFLOAT = 100, - VK_FORMAT_R32G32_UINT = 101, - VK_FORMAT_R32G32_SINT = 102, - VK_FORMAT_R32G32_SFLOAT = 103, - VK_FORMAT_R32G32B32_UINT = 104, - VK_FORMAT_R32G32B32_SINT = 105, - VK_FORMAT_R32G32B32_SFLOAT = 106, - VK_FORMAT_R32G32B32A32_UINT = 107, - VK_FORMAT_R32G32B32A32_SINT = 108, - VK_FORMAT_R32G32B32A32_SFLOAT = 109, - VK_FORMAT_R64_UINT = 110, - VK_FORMAT_R64_SINT = 111, - VK_FORMAT_R64_SFLOAT = 112, - VK_FORMAT_R64G64_UINT = 113, - VK_FORMAT_R64G64_SINT = 114, - VK_FORMAT_R64G64_SFLOAT = 115, - VK_FORMAT_R64G64B64_UINT = 116, - VK_FORMAT_R64G64B64_SINT = 117, - VK_FORMAT_R64G64B64_SFLOAT = 118, - VK_FORMAT_R64G64B64A64_UINT = 119, - VK_FORMAT_R64G64B64A64_SINT = 120, - VK_FORMAT_R64G64B64A64_SFLOAT = 121, - VK_FORMAT_B10G11R11_UFLOAT_PACK32 = 122, - VK_FORMAT_E5B9G9R9_UFLOAT_PACK32 = 123, - VK_FORMAT_D16_UNORM = 124, - VK_FORMAT_X8_D24_UNORM_PACK32 = 125, - VK_FORMAT_D32_SFLOAT = 126, - VK_FORMAT_S8_UINT = 127, - VK_FORMAT_D16_UNORM_S8_UINT = 128, - VK_FORMAT_D24_UNORM_S8_UINT = 129, - VK_FORMAT_D32_SFLOAT_S8_UINT = 130, - VK_FORMAT_BC1_RGB_UNORM_BLOCK = 131, - VK_FORMAT_BC1_RGB_SRGB_BLOCK = 132, - VK_FORMAT_BC1_RGBA_UNORM_BLOCK = 133, - VK_FORMAT_BC1_RGBA_SRGB_BLOCK = 134, - VK_FORMAT_BC2_UNORM_BLOCK = 135, - VK_FORMAT_BC2_SRGB_BLOCK = 136, - VK_FORMAT_BC3_UNORM_BLOCK = 137, - VK_FORMAT_BC3_SRGB_BLOCK = 138, - VK_FORMAT_BC4_UNORM_BLOCK = 139, - VK_FORMAT_BC4_SNORM_BLOCK = 140, - VK_FORMAT_BC5_UNORM_BLOCK = 141, - VK_FORMAT_BC5_SNORM_BLOCK = 142, - VK_FORMAT_BC6H_UFLOAT_BLOCK = 143, - VK_FORMAT_BC6H_SFLOAT_BLOCK = 144, - VK_FORMAT_BC7_UNORM_BLOCK = 145, - VK_FORMAT_BC7_SRGB_BLOCK = 146, - VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK = 147, - VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK = 148, - VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK = 149, - VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK = 150, - VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK = 151, - VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK = 152, - VK_FORMAT_EAC_R11_UNORM_BLOCK = 153, - VK_FORMAT_EAC_R11_SNORM_BLOCK = 154, - VK_FORMAT_EAC_R11G11_UNORM_BLOCK = 155, - VK_FORMAT_EAC_R11G11_SNORM_BLOCK = 156, - VK_FORMAT_ASTC_4x4_UNORM_BLOCK = 157, - VK_FORMAT_ASTC_4x4_SRGB_BLOCK = 158, - VK_FORMAT_ASTC_5x4_UNORM_BLOCK = 159, - VK_FORMAT_ASTC_5x4_SRGB_BLOCK = 160, - VK_FORMAT_ASTC_5x5_UNORM_BLOCK = 161, - VK_FORMAT_ASTC_5x5_SRGB_BLOCK = 162, - VK_FORMAT_ASTC_6x5_UNORM_BLOCK = 163, - VK_FORMAT_ASTC_6x5_SRGB_BLOCK = 164, - VK_FORMAT_ASTC_6x6_UNORM_BLOCK = 165, - VK_FORMAT_ASTC_6x6_SRGB_BLOCK = 166, - VK_FORMAT_ASTC_8x5_UNORM_BLOCK = 167, - VK_FORMAT_ASTC_8x5_SRGB_BLOCK = 168, - VK_FORMAT_ASTC_8x6_UNORM_BLOCK = 169, - VK_FORMAT_ASTC_8x6_SRGB_BLOCK = 170, - VK_FORMAT_ASTC_8x8_UNORM_BLOCK = 171, - VK_FORMAT_ASTC_8x8_SRGB_BLOCK = 172, - VK_FORMAT_ASTC_10x5_UNORM_BLOCK = 173, - VK_FORMAT_ASTC_10x5_SRGB_BLOCK = 174, - VK_FORMAT_ASTC_10x6_UNORM_BLOCK = 175, - VK_FORMAT_ASTC_10x6_SRGB_BLOCK = 176, - VK_FORMAT_ASTC_10x8_UNORM_BLOCK = 177, - VK_FORMAT_ASTC_10x8_SRGB_BLOCK = 178, - VK_FORMAT_ASTC_10x10_UNORM_BLOCK = 179, - VK_FORMAT_ASTC_10x10_SRGB_BLOCK = 180, - VK_FORMAT_ASTC_12x10_UNORM_BLOCK = 181, - VK_FORMAT_ASTC_12x10_SRGB_BLOCK = 182, - VK_FORMAT_ASTC_12x12_UNORM_BLOCK = 183, - VK_FORMAT_ASTC_12x12_SRGB_BLOCK = 184, - VK_FORMAT_PVRTC1_2BPP_UNORM_BLOCK_IMG = 1000054000, - VK_FORMAT_PVRTC1_4BPP_UNORM_BLOCK_IMG = 1000054001, - VK_FORMAT_PVRTC2_2BPP_UNORM_BLOCK_IMG = 1000054002, - VK_FORMAT_PVRTC2_4BPP_UNORM_BLOCK_IMG = 1000054003, - VK_FORMAT_PVRTC1_2BPP_SRGB_BLOCK_IMG = 1000054004, - VK_FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG = 
1000054005, - VK_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG = 1000054006, - VK_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG = 1000054007, - VK_FORMAT_BEGIN_RANGE = VK_FORMAT_UNDEFINED, - VK_FORMAT_END_RANGE = VK_FORMAT_ASTC_12x12_SRGB_BLOCK, - VK_FORMAT_RANGE_SIZE = (VK_FORMAT_ASTC_12x12_SRGB_BLOCK - VK_FORMAT_UNDEFINED + 1), - VK_FORMAT_MAX_ENUM = 0x7FFFFFFF -} VkFormat; - -typedef enum VkImageType { - VK_IMAGE_TYPE_1D = 0, - VK_IMAGE_TYPE_2D = 1, - VK_IMAGE_TYPE_3D = 2, - VK_IMAGE_TYPE_BEGIN_RANGE = VK_IMAGE_TYPE_1D, - VK_IMAGE_TYPE_END_RANGE = VK_IMAGE_TYPE_3D, - VK_IMAGE_TYPE_RANGE_SIZE = (VK_IMAGE_TYPE_3D - VK_IMAGE_TYPE_1D + 1), - VK_IMAGE_TYPE_MAX_ENUM = 0x7FFFFFFF -} VkImageType; - -typedef enum VkImageTiling { - VK_IMAGE_TILING_OPTIMAL = 0, - VK_IMAGE_TILING_LINEAR = 1, - VK_IMAGE_TILING_BEGIN_RANGE = VK_IMAGE_TILING_OPTIMAL, - VK_IMAGE_TILING_END_RANGE = VK_IMAGE_TILING_LINEAR, - VK_IMAGE_TILING_RANGE_SIZE = (VK_IMAGE_TILING_LINEAR - VK_IMAGE_TILING_OPTIMAL + 1), - VK_IMAGE_TILING_MAX_ENUM = 0x7FFFFFFF -} VkImageTiling; - -typedef enum VkPhysicalDeviceType { - VK_PHYSICAL_DEVICE_TYPE_OTHER = 0, - VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU = 1, - VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU = 2, - VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU = 3, - VK_PHYSICAL_DEVICE_TYPE_CPU = 4, - VK_PHYSICAL_DEVICE_TYPE_BEGIN_RANGE = VK_PHYSICAL_DEVICE_TYPE_OTHER, - VK_PHYSICAL_DEVICE_TYPE_END_RANGE = VK_PHYSICAL_DEVICE_TYPE_CPU, - VK_PHYSICAL_DEVICE_TYPE_RANGE_SIZE = (VK_PHYSICAL_DEVICE_TYPE_CPU - VK_PHYSICAL_DEVICE_TYPE_OTHER + 1), - VK_PHYSICAL_DEVICE_TYPE_MAX_ENUM = 0x7FFFFFFF -} VkPhysicalDeviceType; - -typedef enum VkQueryType { - VK_QUERY_TYPE_OCCLUSION = 0, - VK_QUERY_TYPE_PIPELINE_STATISTICS = 1, - VK_QUERY_TYPE_TIMESTAMP = 2, - VK_QUERY_TYPE_BEGIN_RANGE = VK_QUERY_TYPE_OCCLUSION, - VK_QUERY_TYPE_END_RANGE = VK_QUERY_TYPE_TIMESTAMP, - VK_QUERY_TYPE_RANGE_SIZE = (VK_QUERY_TYPE_TIMESTAMP - VK_QUERY_TYPE_OCCLUSION + 1), - VK_QUERY_TYPE_MAX_ENUM = 0x7FFFFFFF -} VkQueryType; - -typedef enum VkSharingMode { - VK_SHARING_MODE_EXCLUSIVE = 0, - VK_SHARING_MODE_CONCURRENT = 1, - VK_SHARING_MODE_BEGIN_RANGE = VK_SHARING_MODE_EXCLUSIVE, - VK_SHARING_MODE_END_RANGE = VK_SHARING_MODE_CONCURRENT, - VK_SHARING_MODE_RANGE_SIZE = (VK_SHARING_MODE_CONCURRENT - VK_SHARING_MODE_EXCLUSIVE + 1), - VK_SHARING_MODE_MAX_ENUM = 0x7FFFFFFF -} VkSharingMode; - -typedef enum VkImageLayout { - VK_IMAGE_LAYOUT_UNDEFINED = 0, - VK_IMAGE_LAYOUT_GENERAL = 1, - VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL = 2, - VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL = 3, - VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL = 4, - VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL = 5, - VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL = 6, - VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL = 7, - VK_IMAGE_LAYOUT_PREINITIALIZED = 8, - VK_IMAGE_LAYOUT_PRESENT_SRC_KHR = 1000001002, - VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR = 1000111000, - VK_IMAGE_LAYOUT_BEGIN_RANGE = VK_IMAGE_LAYOUT_UNDEFINED, - VK_IMAGE_LAYOUT_END_RANGE = VK_IMAGE_LAYOUT_PREINITIALIZED, - VK_IMAGE_LAYOUT_RANGE_SIZE = (VK_IMAGE_LAYOUT_PREINITIALIZED - VK_IMAGE_LAYOUT_UNDEFINED + 1), - VK_IMAGE_LAYOUT_MAX_ENUM = 0x7FFFFFFF -} VkImageLayout; - -typedef enum VkImageViewType { - VK_IMAGE_VIEW_TYPE_1D = 0, - VK_IMAGE_VIEW_TYPE_2D = 1, - VK_IMAGE_VIEW_TYPE_3D = 2, - VK_IMAGE_VIEW_TYPE_CUBE = 3, - VK_IMAGE_VIEW_TYPE_1D_ARRAY = 4, - VK_IMAGE_VIEW_TYPE_2D_ARRAY = 5, - VK_IMAGE_VIEW_TYPE_CUBE_ARRAY = 6, - VK_IMAGE_VIEW_TYPE_BEGIN_RANGE = VK_IMAGE_VIEW_TYPE_1D, - VK_IMAGE_VIEW_TYPE_END_RANGE = VK_IMAGE_VIEW_TYPE_CUBE_ARRAY, - VK_IMAGE_VIEW_TYPE_RANGE_SIZE = 
(VK_IMAGE_VIEW_TYPE_CUBE_ARRAY - VK_IMAGE_VIEW_TYPE_1D + 1), - VK_IMAGE_VIEW_TYPE_MAX_ENUM = 0x7FFFFFFF -} VkImageViewType; - -typedef enum VkComponentSwizzle { - VK_COMPONENT_SWIZZLE_IDENTITY = 0, - VK_COMPONENT_SWIZZLE_ZERO = 1, - VK_COMPONENT_SWIZZLE_ONE = 2, - VK_COMPONENT_SWIZZLE_R = 3, - VK_COMPONENT_SWIZZLE_G = 4, - VK_COMPONENT_SWIZZLE_B = 5, - VK_COMPONENT_SWIZZLE_A = 6, - VK_COMPONENT_SWIZZLE_BEGIN_RANGE = VK_COMPONENT_SWIZZLE_IDENTITY, - VK_COMPONENT_SWIZZLE_END_RANGE = VK_COMPONENT_SWIZZLE_A, - VK_COMPONENT_SWIZZLE_RANGE_SIZE = (VK_COMPONENT_SWIZZLE_A - VK_COMPONENT_SWIZZLE_IDENTITY + 1), - VK_COMPONENT_SWIZZLE_MAX_ENUM = 0x7FFFFFFF -} VkComponentSwizzle; - -typedef enum VkVertexInputRate { - VK_VERTEX_INPUT_RATE_VERTEX = 0, - VK_VERTEX_INPUT_RATE_INSTANCE = 1, - VK_VERTEX_INPUT_RATE_BEGIN_RANGE = VK_VERTEX_INPUT_RATE_VERTEX, - VK_VERTEX_INPUT_RATE_END_RANGE = VK_VERTEX_INPUT_RATE_INSTANCE, - VK_VERTEX_INPUT_RATE_RANGE_SIZE = (VK_VERTEX_INPUT_RATE_INSTANCE - VK_VERTEX_INPUT_RATE_VERTEX + 1), - VK_VERTEX_INPUT_RATE_MAX_ENUM = 0x7FFFFFFF -} VkVertexInputRate; - -typedef enum VkPrimitiveTopology { - VK_PRIMITIVE_TOPOLOGY_POINT_LIST = 0, - VK_PRIMITIVE_TOPOLOGY_LINE_LIST = 1, - VK_PRIMITIVE_TOPOLOGY_LINE_STRIP = 2, - VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST = 3, - VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP = 4, - VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN = 5, - VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY = 6, - VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY = 7, - VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY = 8, - VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY = 9, - VK_PRIMITIVE_TOPOLOGY_PATCH_LIST = 10, - VK_PRIMITIVE_TOPOLOGY_BEGIN_RANGE = VK_PRIMITIVE_TOPOLOGY_POINT_LIST, - VK_PRIMITIVE_TOPOLOGY_END_RANGE = VK_PRIMITIVE_TOPOLOGY_PATCH_LIST, - VK_PRIMITIVE_TOPOLOGY_RANGE_SIZE = (VK_PRIMITIVE_TOPOLOGY_PATCH_LIST - VK_PRIMITIVE_TOPOLOGY_POINT_LIST + 1), - VK_PRIMITIVE_TOPOLOGY_MAX_ENUM = 0x7FFFFFFF -} VkPrimitiveTopology; - -typedef enum VkPolygonMode { - VK_POLYGON_MODE_FILL = 0, - VK_POLYGON_MODE_LINE = 1, - VK_POLYGON_MODE_POINT = 2, - VK_POLYGON_MODE_FILL_RECTANGLE_NV = 1000153000, - VK_POLYGON_MODE_BEGIN_RANGE = VK_POLYGON_MODE_FILL, - VK_POLYGON_MODE_END_RANGE = VK_POLYGON_MODE_POINT, - VK_POLYGON_MODE_RANGE_SIZE = (VK_POLYGON_MODE_POINT - VK_POLYGON_MODE_FILL + 1), - VK_POLYGON_MODE_MAX_ENUM = 0x7FFFFFFF -} VkPolygonMode; - -typedef enum VkFrontFace { - VK_FRONT_FACE_COUNTER_CLOCKWISE = 0, - VK_FRONT_FACE_CLOCKWISE = 1, - VK_FRONT_FACE_BEGIN_RANGE = VK_FRONT_FACE_COUNTER_CLOCKWISE, - VK_FRONT_FACE_END_RANGE = VK_FRONT_FACE_CLOCKWISE, - VK_FRONT_FACE_RANGE_SIZE = (VK_FRONT_FACE_CLOCKWISE - VK_FRONT_FACE_COUNTER_CLOCKWISE + 1), - VK_FRONT_FACE_MAX_ENUM = 0x7FFFFFFF -} VkFrontFace; - -typedef enum VkCompareOp { - VK_COMPARE_OP_NEVER = 0, - VK_COMPARE_OP_LESS = 1, - VK_COMPARE_OP_EQUAL = 2, - VK_COMPARE_OP_LESS_OR_EQUAL = 3, - VK_COMPARE_OP_GREATER = 4, - VK_COMPARE_OP_NOT_EQUAL = 5, - VK_COMPARE_OP_GREATER_OR_EQUAL = 6, - VK_COMPARE_OP_ALWAYS = 7, - VK_COMPARE_OP_BEGIN_RANGE = VK_COMPARE_OP_NEVER, - VK_COMPARE_OP_END_RANGE = VK_COMPARE_OP_ALWAYS, - VK_COMPARE_OP_RANGE_SIZE = (VK_COMPARE_OP_ALWAYS - VK_COMPARE_OP_NEVER + 1), - VK_COMPARE_OP_MAX_ENUM = 0x7FFFFFFF -} VkCompareOp; - -typedef enum VkStencilOp { - VK_STENCIL_OP_KEEP = 0, - VK_STENCIL_OP_ZERO = 1, - VK_STENCIL_OP_REPLACE = 2, - VK_STENCIL_OP_INCREMENT_AND_CLAMP = 3, - VK_STENCIL_OP_DECREMENT_AND_CLAMP = 4, - VK_STENCIL_OP_INVERT = 5, - VK_STENCIL_OP_INCREMENT_AND_WRAP = 6, - VK_STENCIL_OP_DECREMENT_AND_WRAP = 7, - 
VK_STENCIL_OP_BEGIN_RANGE = VK_STENCIL_OP_KEEP, - VK_STENCIL_OP_END_RANGE = VK_STENCIL_OP_DECREMENT_AND_WRAP, - VK_STENCIL_OP_RANGE_SIZE = (VK_STENCIL_OP_DECREMENT_AND_WRAP - VK_STENCIL_OP_KEEP + 1), - VK_STENCIL_OP_MAX_ENUM = 0x7FFFFFFF -} VkStencilOp; - -typedef enum VkLogicOp { - VK_LOGIC_OP_CLEAR = 0, - VK_LOGIC_OP_AND = 1, - VK_LOGIC_OP_AND_REVERSE = 2, - VK_LOGIC_OP_COPY = 3, - VK_LOGIC_OP_AND_INVERTED = 4, - VK_LOGIC_OP_NO_OP = 5, - VK_LOGIC_OP_XOR = 6, - VK_LOGIC_OP_OR = 7, - VK_LOGIC_OP_NOR = 8, - VK_LOGIC_OP_EQUIVALENT = 9, - VK_LOGIC_OP_INVERT = 10, - VK_LOGIC_OP_OR_REVERSE = 11, - VK_LOGIC_OP_COPY_INVERTED = 12, - VK_LOGIC_OP_OR_INVERTED = 13, - VK_LOGIC_OP_NAND = 14, - VK_LOGIC_OP_SET = 15, - VK_LOGIC_OP_BEGIN_RANGE = VK_LOGIC_OP_CLEAR, - VK_LOGIC_OP_END_RANGE = VK_LOGIC_OP_SET, - VK_LOGIC_OP_RANGE_SIZE = (VK_LOGIC_OP_SET - VK_LOGIC_OP_CLEAR + 1), - VK_LOGIC_OP_MAX_ENUM = 0x7FFFFFFF -} VkLogicOp; - -typedef enum VkBlendFactor { - VK_BLEND_FACTOR_ZERO = 0, - VK_BLEND_FACTOR_ONE = 1, - VK_BLEND_FACTOR_SRC_COLOR = 2, - VK_BLEND_FACTOR_ONE_MINUS_SRC_COLOR = 3, - VK_BLEND_FACTOR_DST_COLOR = 4, - VK_BLEND_FACTOR_ONE_MINUS_DST_COLOR = 5, - VK_BLEND_FACTOR_SRC_ALPHA = 6, - VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA = 7, - VK_BLEND_FACTOR_DST_ALPHA = 8, - VK_BLEND_FACTOR_ONE_MINUS_DST_ALPHA = 9, - VK_BLEND_FACTOR_CONSTANT_COLOR = 10, - VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_COLOR = 11, - VK_BLEND_FACTOR_CONSTANT_ALPHA = 12, - VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA = 13, - VK_BLEND_FACTOR_SRC_ALPHA_SATURATE = 14, - VK_BLEND_FACTOR_SRC1_COLOR = 15, - VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR = 16, - VK_BLEND_FACTOR_SRC1_ALPHA = 17, - VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA = 18, - VK_BLEND_FACTOR_BEGIN_RANGE = VK_BLEND_FACTOR_ZERO, - VK_BLEND_FACTOR_END_RANGE = VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA, - VK_BLEND_FACTOR_RANGE_SIZE = (VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA - VK_BLEND_FACTOR_ZERO + 1), - VK_BLEND_FACTOR_MAX_ENUM = 0x7FFFFFFF -} VkBlendFactor; - -typedef enum VkBlendOp { - VK_BLEND_OP_ADD = 0, - VK_BLEND_OP_SUBTRACT = 1, - VK_BLEND_OP_REVERSE_SUBTRACT = 2, - VK_BLEND_OP_MIN = 3, - VK_BLEND_OP_MAX = 4, - VK_BLEND_OP_ZERO_EXT = 1000148000, - VK_BLEND_OP_SRC_EXT = 1000148001, - VK_BLEND_OP_DST_EXT = 1000148002, - VK_BLEND_OP_SRC_OVER_EXT = 1000148003, - VK_BLEND_OP_DST_OVER_EXT = 1000148004, - VK_BLEND_OP_SRC_IN_EXT = 1000148005, - VK_BLEND_OP_DST_IN_EXT = 1000148006, - VK_BLEND_OP_SRC_OUT_EXT = 1000148007, - VK_BLEND_OP_DST_OUT_EXT = 1000148008, - VK_BLEND_OP_SRC_ATOP_EXT = 1000148009, - VK_BLEND_OP_DST_ATOP_EXT = 1000148010, - VK_BLEND_OP_XOR_EXT = 1000148011, - VK_BLEND_OP_MULTIPLY_EXT = 1000148012, - VK_BLEND_OP_SCREEN_EXT = 1000148013, - VK_BLEND_OP_OVERLAY_EXT = 1000148014, - VK_BLEND_OP_DARKEN_EXT = 1000148015, - VK_BLEND_OP_LIGHTEN_EXT = 1000148016, - VK_BLEND_OP_COLORDODGE_EXT = 1000148017, - VK_BLEND_OP_COLORBURN_EXT = 1000148018, - VK_BLEND_OP_HARDLIGHT_EXT = 1000148019, - VK_BLEND_OP_SOFTLIGHT_EXT = 1000148020, - VK_BLEND_OP_DIFFERENCE_EXT = 1000148021, - VK_BLEND_OP_EXCLUSION_EXT = 1000148022, - VK_BLEND_OP_INVERT_EXT = 1000148023, - VK_BLEND_OP_INVERT_RGB_EXT = 1000148024, - VK_BLEND_OP_LINEARDODGE_EXT = 1000148025, - VK_BLEND_OP_LINEARBURN_EXT = 1000148026, - VK_BLEND_OP_VIVIDLIGHT_EXT = 1000148027, - VK_BLEND_OP_LINEARLIGHT_EXT = 1000148028, - VK_BLEND_OP_PINLIGHT_EXT = 1000148029, - VK_BLEND_OP_HARDMIX_EXT = 1000148030, - VK_BLEND_OP_HSL_HUE_EXT = 1000148031, - VK_BLEND_OP_HSL_SATURATION_EXT = 1000148032, - VK_BLEND_OP_HSL_COLOR_EXT = 1000148033, - VK_BLEND_OP_HSL_LUMINOSITY_EXT = 
1000148034, - VK_BLEND_OP_PLUS_EXT = 1000148035, - VK_BLEND_OP_PLUS_CLAMPED_EXT = 1000148036, - VK_BLEND_OP_PLUS_CLAMPED_ALPHA_EXT = 1000148037, - VK_BLEND_OP_PLUS_DARKER_EXT = 1000148038, - VK_BLEND_OP_MINUS_EXT = 1000148039, - VK_BLEND_OP_MINUS_CLAMPED_EXT = 1000148040, - VK_BLEND_OP_CONTRAST_EXT = 1000148041, - VK_BLEND_OP_INVERT_OVG_EXT = 1000148042, - VK_BLEND_OP_RED_EXT = 1000148043, - VK_BLEND_OP_GREEN_EXT = 1000148044, - VK_BLEND_OP_BLUE_EXT = 1000148045, - VK_BLEND_OP_BEGIN_RANGE = VK_BLEND_OP_ADD, - VK_BLEND_OP_END_RANGE = VK_BLEND_OP_MAX, - VK_BLEND_OP_RANGE_SIZE = (VK_BLEND_OP_MAX - VK_BLEND_OP_ADD + 1), - VK_BLEND_OP_MAX_ENUM = 0x7FFFFFFF -} VkBlendOp; - -typedef enum VkDynamicState { - VK_DYNAMIC_STATE_VIEWPORT = 0, - VK_DYNAMIC_STATE_SCISSOR = 1, - VK_DYNAMIC_STATE_LINE_WIDTH = 2, - VK_DYNAMIC_STATE_DEPTH_BIAS = 3, - VK_DYNAMIC_STATE_BLEND_CONSTANTS = 4, - VK_DYNAMIC_STATE_DEPTH_BOUNDS = 5, - VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK = 6, - VK_DYNAMIC_STATE_STENCIL_WRITE_MASK = 7, - VK_DYNAMIC_STATE_STENCIL_REFERENCE = 8, - VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV = 1000087000, - VK_DYNAMIC_STATE_DISCARD_RECTANGLE_EXT = 1000099000, - VK_DYNAMIC_STATE_BEGIN_RANGE = VK_DYNAMIC_STATE_VIEWPORT, - VK_DYNAMIC_STATE_END_RANGE = VK_DYNAMIC_STATE_STENCIL_REFERENCE, - VK_DYNAMIC_STATE_RANGE_SIZE = (VK_DYNAMIC_STATE_STENCIL_REFERENCE - VK_DYNAMIC_STATE_VIEWPORT + 1), - VK_DYNAMIC_STATE_MAX_ENUM = 0x7FFFFFFF -} VkDynamicState; - -typedef enum VkFilter { - VK_FILTER_NEAREST = 0, - VK_FILTER_LINEAR = 1, - VK_FILTER_CUBIC_IMG = 1000015000, - VK_FILTER_BEGIN_RANGE = VK_FILTER_NEAREST, - VK_FILTER_END_RANGE = VK_FILTER_LINEAR, - VK_FILTER_RANGE_SIZE = (VK_FILTER_LINEAR - VK_FILTER_NEAREST + 1), - VK_FILTER_MAX_ENUM = 0x7FFFFFFF -} VkFilter; - -typedef enum VkSamplerMipmapMode { - VK_SAMPLER_MIPMAP_MODE_NEAREST = 0, - VK_SAMPLER_MIPMAP_MODE_LINEAR = 1, - VK_SAMPLER_MIPMAP_MODE_BEGIN_RANGE = VK_SAMPLER_MIPMAP_MODE_NEAREST, - VK_SAMPLER_MIPMAP_MODE_END_RANGE = VK_SAMPLER_MIPMAP_MODE_LINEAR, - VK_SAMPLER_MIPMAP_MODE_RANGE_SIZE = (VK_SAMPLER_MIPMAP_MODE_LINEAR - VK_SAMPLER_MIPMAP_MODE_NEAREST + 1), - VK_SAMPLER_MIPMAP_MODE_MAX_ENUM = 0x7FFFFFFF -} VkSamplerMipmapMode; - -typedef enum VkSamplerAddressMode { - VK_SAMPLER_ADDRESS_MODE_REPEAT = 0, - VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT = 1, - VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE = 2, - VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER = 3, - VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE = 4, - VK_SAMPLER_ADDRESS_MODE_BEGIN_RANGE = VK_SAMPLER_ADDRESS_MODE_REPEAT, - VK_SAMPLER_ADDRESS_MODE_END_RANGE = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER, - VK_SAMPLER_ADDRESS_MODE_RANGE_SIZE = (VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER - VK_SAMPLER_ADDRESS_MODE_REPEAT + 1), - VK_SAMPLER_ADDRESS_MODE_MAX_ENUM = 0x7FFFFFFF -} VkSamplerAddressMode; - -typedef enum VkBorderColor { - VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK = 0, - VK_BORDER_COLOR_INT_TRANSPARENT_BLACK = 1, - VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK = 2, - VK_BORDER_COLOR_INT_OPAQUE_BLACK = 3, - VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE = 4, - VK_BORDER_COLOR_INT_OPAQUE_WHITE = 5, - VK_BORDER_COLOR_BEGIN_RANGE = VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK, - VK_BORDER_COLOR_END_RANGE = VK_BORDER_COLOR_INT_OPAQUE_WHITE, - VK_BORDER_COLOR_RANGE_SIZE = (VK_BORDER_COLOR_INT_OPAQUE_WHITE - VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK + 1), - VK_BORDER_COLOR_MAX_ENUM = 0x7FFFFFFF -} VkBorderColor; - -typedef enum VkDescriptorType { - VK_DESCRIPTOR_TYPE_SAMPLER = 0, - VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER = 1, - VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE 
= 2, - VK_DESCRIPTOR_TYPE_STORAGE_IMAGE = 3, - VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER = 4, - VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER = 5, - VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER = 6, - VK_DESCRIPTOR_TYPE_STORAGE_BUFFER = 7, - VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC = 8, - VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC = 9, - VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT = 10, - VK_DESCRIPTOR_TYPE_BEGIN_RANGE = VK_DESCRIPTOR_TYPE_SAMPLER, - VK_DESCRIPTOR_TYPE_END_RANGE = VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, - VK_DESCRIPTOR_TYPE_RANGE_SIZE = (VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT - VK_DESCRIPTOR_TYPE_SAMPLER + 1), - VK_DESCRIPTOR_TYPE_MAX_ENUM = 0x7FFFFFFF -} VkDescriptorType; - -typedef enum VkAttachmentLoadOp { - VK_ATTACHMENT_LOAD_OP_LOAD = 0, - VK_ATTACHMENT_LOAD_OP_CLEAR = 1, - VK_ATTACHMENT_LOAD_OP_DONT_CARE = 2, - VK_ATTACHMENT_LOAD_OP_BEGIN_RANGE = VK_ATTACHMENT_LOAD_OP_LOAD, - VK_ATTACHMENT_LOAD_OP_END_RANGE = VK_ATTACHMENT_LOAD_OP_DONT_CARE, - VK_ATTACHMENT_LOAD_OP_RANGE_SIZE = (VK_ATTACHMENT_LOAD_OP_DONT_CARE - VK_ATTACHMENT_LOAD_OP_LOAD + 1), - VK_ATTACHMENT_LOAD_OP_MAX_ENUM = 0x7FFFFFFF -} VkAttachmentLoadOp; - -typedef enum VkAttachmentStoreOp { - VK_ATTACHMENT_STORE_OP_STORE = 0, - VK_ATTACHMENT_STORE_OP_DONT_CARE = 1, - VK_ATTACHMENT_STORE_OP_BEGIN_RANGE = VK_ATTACHMENT_STORE_OP_STORE, - VK_ATTACHMENT_STORE_OP_END_RANGE = VK_ATTACHMENT_STORE_OP_DONT_CARE, - VK_ATTACHMENT_STORE_OP_RANGE_SIZE = (VK_ATTACHMENT_STORE_OP_DONT_CARE - VK_ATTACHMENT_STORE_OP_STORE + 1), - VK_ATTACHMENT_STORE_OP_MAX_ENUM = 0x7FFFFFFF -} VkAttachmentStoreOp; - -typedef enum VkPipelineBindPoint { - VK_PIPELINE_BIND_POINT_GRAPHICS = 0, - VK_PIPELINE_BIND_POINT_COMPUTE = 1, - VK_PIPELINE_BIND_POINT_BEGIN_RANGE = VK_PIPELINE_BIND_POINT_GRAPHICS, - VK_PIPELINE_BIND_POINT_END_RANGE = VK_PIPELINE_BIND_POINT_COMPUTE, - VK_PIPELINE_BIND_POINT_RANGE_SIZE = (VK_PIPELINE_BIND_POINT_COMPUTE - VK_PIPELINE_BIND_POINT_GRAPHICS + 1), - VK_PIPELINE_BIND_POINT_MAX_ENUM = 0x7FFFFFFF -} VkPipelineBindPoint; - -typedef enum VkCommandBufferLevel { - VK_COMMAND_BUFFER_LEVEL_PRIMARY = 0, - VK_COMMAND_BUFFER_LEVEL_SECONDARY = 1, - VK_COMMAND_BUFFER_LEVEL_BEGIN_RANGE = VK_COMMAND_BUFFER_LEVEL_PRIMARY, - VK_COMMAND_BUFFER_LEVEL_END_RANGE = VK_COMMAND_BUFFER_LEVEL_SECONDARY, - VK_COMMAND_BUFFER_LEVEL_RANGE_SIZE = (VK_COMMAND_BUFFER_LEVEL_SECONDARY - VK_COMMAND_BUFFER_LEVEL_PRIMARY + 1), - VK_COMMAND_BUFFER_LEVEL_MAX_ENUM = 0x7FFFFFFF -} VkCommandBufferLevel; - -typedef enum VkIndexType { - VK_INDEX_TYPE_UINT16 = 0, - VK_INDEX_TYPE_UINT32 = 1, - VK_INDEX_TYPE_BEGIN_RANGE = VK_INDEX_TYPE_UINT16, - VK_INDEX_TYPE_END_RANGE = VK_INDEX_TYPE_UINT32, - VK_INDEX_TYPE_RANGE_SIZE = (VK_INDEX_TYPE_UINT32 - VK_INDEX_TYPE_UINT16 + 1), - VK_INDEX_TYPE_MAX_ENUM = 0x7FFFFFFF -} VkIndexType; - -typedef enum VkSubpassContents { - VK_SUBPASS_CONTENTS_INLINE = 0, - VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS = 1, - VK_SUBPASS_CONTENTS_BEGIN_RANGE = VK_SUBPASS_CONTENTS_INLINE, - VK_SUBPASS_CONTENTS_END_RANGE = VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS, - VK_SUBPASS_CONTENTS_RANGE_SIZE = (VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS - VK_SUBPASS_CONTENTS_INLINE + 1), - VK_SUBPASS_CONTENTS_MAX_ENUM = 0x7FFFFFFF -} VkSubpassContents; - -typedef enum VkObjectType { - VK_OBJECT_TYPE_UNKNOWN = 0, - VK_OBJECT_TYPE_INSTANCE = 1, - VK_OBJECT_TYPE_PHYSICAL_DEVICE = 2, - VK_OBJECT_TYPE_DEVICE = 3, - VK_OBJECT_TYPE_QUEUE = 4, - VK_OBJECT_TYPE_SEMAPHORE = 5, - VK_OBJECT_TYPE_COMMAND_BUFFER = 6, - VK_OBJECT_TYPE_FENCE = 7, - VK_OBJECT_TYPE_DEVICE_MEMORY = 8, - 
VK_OBJECT_TYPE_BUFFER = 9, - VK_OBJECT_TYPE_IMAGE = 10, - VK_OBJECT_TYPE_EVENT = 11, - VK_OBJECT_TYPE_QUERY_POOL = 12, - VK_OBJECT_TYPE_BUFFER_VIEW = 13, - VK_OBJECT_TYPE_IMAGE_VIEW = 14, - VK_OBJECT_TYPE_SHADER_MODULE = 15, - VK_OBJECT_TYPE_PIPELINE_CACHE = 16, - VK_OBJECT_TYPE_PIPELINE_LAYOUT = 17, - VK_OBJECT_TYPE_RENDER_PASS = 18, - VK_OBJECT_TYPE_PIPELINE = 19, - VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT = 20, - VK_OBJECT_TYPE_SAMPLER = 21, - VK_OBJECT_TYPE_DESCRIPTOR_POOL = 22, - VK_OBJECT_TYPE_DESCRIPTOR_SET = 23, - VK_OBJECT_TYPE_FRAMEBUFFER = 24, - VK_OBJECT_TYPE_COMMAND_POOL = 25, - VK_OBJECT_TYPE_SURFACE_KHR = 1000000000, - VK_OBJECT_TYPE_SWAPCHAIN_KHR = 1000001000, - VK_OBJECT_TYPE_DISPLAY_KHR = 1000002000, - VK_OBJECT_TYPE_DISPLAY_MODE_KHR = 1000002001, - VK_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT = 1000011000, - VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR = 1000085000, - VK_OBJECT_TYPE_OBJECT_TABLE_NVX = 1000086000, - VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX = 1000086001, - VK_OBJECT_TYPE_BEGIN_RANGE = VK_OBJECT_TYPE_UNKNOWN, - VK_OBJECT_TYPE_END_RANGE = VK_OBJECT_TYPE_COMMAND_POOL, - VK_OBJECT_TYPE_RANGE_SIZE = (VK_OBJECT_TYPE_COMMAND_POOL - VK_OBJECT_TYPE_UNKNOWN + 1), - VK_OBJECT_TYPE_MAX_ENUM = 0x7FFFFFFF -} VkObjectType; - -typedef VkFlags VkInstanceCreateFlags; - -typedef enum VkFormatFeatureFlagBits { - VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT = 0x00000001, - VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT = 0x00000002, - VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT = 0x00000004, - VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT = 0x00000008, - VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT = 0x00000010, - VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT = 0x00000020, - VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT = 0x00000040, - VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT = 0x00000080, - VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT = 0x00000100, - VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT = 0x00000200, - VK_FORMAT_FEATURE_BLIT_SRC_BIT = 0x00000400, - VK_FORMAT_FEATURE_BLIT_DST_BIT = 0x00000800, - VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT = 0x00001000, - VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG = 0x00002000, - VK_FORMAT_FEATURE_TRANSFER_SRC_BIT_KHR = 0x00004000, - VK_FORMAT_FEATURE_TRANSFER_DST_BIT_KHR = 0x00008000, - VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT_EXT = 0x00010000, - VK_FORMAT_FEATURE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkFormatFeatureFlagBits; -typedef VkFlags VkFormatFeatureFlags; - -typedef enum VkImageUsageFlagBits { - VK_IMAGE_USAGE_TRANSFER_SRC_BIT = 0x00000001, - VK_IMAGE_USAGE_TRANSFER_DST_BIT = 0x00000002, - VK_IMAGE_USAGE_SAMPLED_BIT = 0x00000004, - VK_IMAGE_USAGE_STORAGE_BIT = 0x00000008, - VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT = 0x00000010, - VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT = 0x00000020, - VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT = 0x00000040, - VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT = 0x00000080, - VK_IMAGE_USAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkImageUsageFlagBits; -typedef VkFlags VkImageUsageFlags; - -typedef enum VkImageCreateFlagBits { - VK_IMAGE_CREATE_SPARSE_BINDING_BIT = 0x00000001, - VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT = 0x00000002, - VK_IMAGE_CREATE_SPARSE_ALIASED_BIT = 0x00000004, - VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT = 0x00000008, - VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT = 0x00000010, - VK_IMAGE_CREATE_BIND_SFR_BIT_KHX = 0x00000040, - VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR = 0x00000020, - VK_IMAGE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkImageCreateFlagBits; -typedef VkFlags VkImageCreateFlags; - -typedef enum 
VkSampleCountFlagBits { - VK_SAMPLE_COUNT_1_BIT = 0x00000001, - VK_SAMPLE_COUNT_2_BIT = 0x00000002, - VK_SAMPLE_COUNT_4_BIT = 0x00000004, - VK_SAMPLE_COUNT_8_BIT = 0x00000008, - VK_SAMPLE_COUNT_16_BIT = 0x00000010, - VK_SAMPLE_COUNT_32_BIT = 0x00000020, - VK_SAMPLE_COUNT_64_BIT = 0x00000040, - VK_SAMPLE_COUNT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkSampleCountFlagBits; -typedef VkFlags VkSampleCountFlags; - -typedef enum VkQueueFlagBits { - VK_QUEUE_GRAPHICS_BIT = 0x00000001, - VK_QUEUE_COMPUTE_BIT = 0x00000002, - VK_QUEUE_TRANSFER_BIT = 0x00000004, - VK_QUEUE_SPARSE_BINDING_BIT = 0x00000008, - VK_QUEUE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkQueueFlagBits; -typedef VkFlags VkQueueFlags; - -typedef enum VkMemoryPropertyFlagBits { - VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT = 0x00000001, - VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT = 0x00000002, - VK_MEMORY_PROPERTY_HOST_COHERENT_BIT = 0x00000004, - VK_MEMORY_PROPERTY_HOST_CACHED_BIT = 0x00000008, - VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT = 0x00000010, - VK_MEMORY_PROPERTY_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkMemoryPropertyFlagBits; -typedef VkFlags VkMemoryPropertyFlags; - -typedef enum VkMemoryHeapFlagBits { - VK_MEMORY_HEAP_DEVICE_LOCAL_BIT = 0x00000001, - VK_MEMORY_HEAP_MULTI_INSTANCE_BIT_KHX = 0x00000002, - VK_MEMORY_HEAP_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkMemoryHeapFlagBits; -typedef VkFlags VkMemoryHeapFlags; -typedef VkFlags VkDeviceCreateFlags; -typedef VkFlags VkDeviceQueueCreateFlags; - -typedef enum VkPipelineStageFlagBits { - VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT = 0x00000001, - VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT = 0x00000002, - VK_PIPELINE_STAGE_VERTEX_INPUT_BIT = 0x00000004, - VK_PIPELINE_STAGE_VERTEX_SHADER_BIT = 0x00000008, - VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT = 0x00000010, - VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT = 0x00000020, - VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT = 0x00000040, - VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT = 0x00000080, - VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT = 0x00000100, - VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT = 0x00000200, - VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT = 0x00000400, - VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT = 0x00000800, - VK_PIPELINE_STAGE_TRANSFER_BIT = 0x00001000, - VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT = 0x00002000, - VK_PIPELINE_STAGE_HOST_BIT = 0x00004000, - VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT = 0x00008000, - VK_PIPELINE_STAGE_ALL_COMMANDS_BIT = 0x00010000, - VK_PIPELINE_STAGE_COMMAND_PROCESS_BIT_NVX = 0x00020000, - VK_PIPELINE_STAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkPipelineStageFlagBits; -typedef VkFlags VkPipelineStageFlags; -typedef VkFlags VkMemoryMapFlags; - -typedef enum VkImageAspectFlagBits { - VK_IMAGE_ASPECT_COLOR_BIT = 0x00000001, - VK_IMAGE_ASPECT_DEPTH_BIT = 0x00000002, - VK_IMAGE_ASPECT_STENCIL_BIT = 0x00000004, - VK_IMAGE_ASPECT_METADATA_BIT = 0x00000008, - VK_IMAGE_ASPECT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkImageAspectFlagBits; -typedef VkFlags VkImageAspectFlags; - -typedef enum VkSparseImageFormatFlagBits { - VK_SPARSE_IMAGE_FORMAT_SINGLE_MIPTAIL_BIT = 0x00000001, - VK_SPARSE_IMAGE_FORMAT_ALIGNED_MIP_SIZE_BIT = 0x00000002, - VK_SPARSE_IMAGE_FORMAT_NONSTANDARD_BLOCK_SIZE_BIT = 0x00000004, - VK_SPARSE_IMAGE_FORMAT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkSparseImageFormatFlagBits; -typedef VkFlags VkSparseImageFormatFlags; - -typedef enum VkSparseMemoryBindFlagBits { - VK_SPARSE_MEMORY_BIND_METADATA_BIT = 0x00000001, - VK_SPARSE_MEMORY_BIND_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkSparseMemoryBindFlagBits; -typedef VkFlags VkSparseMemoryBindFlags; - -typedef 
enum VkFenceCreateFlagBits { - VK_FENCE_CREATE_SIGNALED_BIT = 0x00000001, - VK_FENCE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkFenceCreateFlagBits; -typedef VkFlags VkFenceCreateFlags; -typedef VkFlags VkSemaphoreCreateFlags; -typedef VkFlags VkEventCreateFlags; -typedef VkFlags VkQueryPoolCreateFlags; - -typedef enum VkQueryPipelineStatisticFlagBits { - VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_VERTICES_BIT = 0x00000001, - VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_PRIMITIVES_BIT = 0x00000002, - VK_QUERY_PIPELINE_STATISTIC_VERTEX_SHADER_INVOCATIONS_BIT = 0x00000004, - VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_INVOCATIONS_BIT = 0x00000008, - VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_PRIMITIVES_BIT = 0x00000010, - VK_QUERY_PIPELINE_STATISTIC_CLIPPING_INVOCATIONS_BIT = 0x00000020, - VK_QUERY_PIPELINE_STATISTIC_CLIPPING_PRIMITIVES_BIT = 0x00000040, - VK_QUERY_PIPELINE_STATISTIC_FRAGMENT_SHADER_INVOCATIONS_BIT = 0x00000080, - VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_CONTROL_SHADER_PATCHES_BIT = 0x00000100, - VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_EVALUATION_SHADER_INVOCATIONS_BIT = 0x00000200, - VK_QUERY_PIPELINE_STATISTIC_COMPUTE_SHADER_INVOCATIONS_BIT = 0x00000400, - VK_QUERY_PIPELINE_STATISTIC_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkQueryPipelineStatisticFlagBits; -typedef VkFlags VkQueryPipelineStatisticFlags; - -typedef enum VkQueryResultFlagBits { - VK_QUERY_RESULT_64_BIT = 0x00000001, - VK_QUERY_RESULT_WAIT_BIT = 0x00000002, - VK_QUERY_RESULT_WITH_AVAILABILITY_BIT = 0x00000004, - VK_QUERY_RESULT_PARTIAL_BIT = 0x00000008, - VK_QUERY_RESULT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkQueryResultFlagBits; -typedef VkFlags VkQueryResultFlags; - -typedef enum VkBufferCreateFlagBits { - VK_BUFFER_CREATE_SPARSE_BINDING_BIT = 0x00000001, - VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT = 0x00000002, - VK_BUFFER_CREATE_SPARSE_ALIASED_BIT = 0x00000004, - VK_BUFFER_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkBufferCreateFlagBits; -typedef VkFlags VkBufferCreateFlags; - -typedef enum VkBufferUsageFlagBits { - VK_BUFFER_USAGE_TRANSFER_SRC_BIT = 0x00000001, - VK_BUFFER_USAGE_TRANSFER_DST_BIT = 0x00000002, - VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT = 0x00000004, - VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT = 0x00000008, - VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT = 0x00000010, - VK_BUFFER_USAGE_STORAGE_BUFFER_BIT = 0x00000020, - VK_BUFFER_USAGE_INDEX_BUFFER_BIT = 0x00000040, - VK_BUFFER_USAGE_VERTEX_BUFFER_BIT = 0x00000080, - VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT = 0x00000100, - VK_BUFFER_USAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkBufferUsageFlagBits; -typedef VkFlags VkBufferUsageFlags; -typedef VkFlags VkBufferViewCreateFlags; -typedef VkFlags VkImageViewCreateFlags; -typedef VkFlags VkShaderModuleCreateFlags; -typedef VkFlags VkPipelineCacheCreateFlags; - -typedef enum VkPipelineCreateFlagBits { - VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT = 0x00000001, - VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT = 0x00000002, - VK_PIPELINE_CREATE_DERIVATIVE_BIT = 0x00000004, - VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT_KHX = 0x00000008, - VK_PIPELINE_CREATE_DISPATCH_BASE_KHX = 0x00000010, - VK_PIPELINE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkPipelineCreateFlagBits; -typedef VkFlags VkPipelineCreateFlags; -typedef VkFlags VkPipelineShaderStageCreateFlags; - -typedef enum VkShaderStageFlagBits { - VK_SHADER_STAGE_VERTEX_BIT = 0x00000001, - VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT = 0x00000002, - VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT = 0x00000004, - VK_SHADER_STAGE_GEOMETRY_BIT = 0x00000008, - 
VK_SHADER_STAGE_FRAGMENT_BIT = 0x00000010, - VK_SHADER_STAGE_COMPUTE_BIT = 0x00000020, - VK_SHADER_STAGE_ALL_GRAPHICS = 0x0000001F, - VK_SHADER_STAGE_ALL = 0x7FFFFFFF, - VK_SHADER_STAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkShaderStageFlagBits; -typedef VkFlags VkPipelineVertexInputStateCreateFlags; -typedef VkFlags VkPipelineInputAssemblyStateCreateFlags; -typedef VkFlags VkPipelineTessellationStateCreateFlags; -typedef VkFlags VkPipelineViewportStateCreateFlags; -typedef VkFlags VkPipelineRasterizationStateCreateFlags; - -typedef enum VkCullModeFlagBits { - VK_CULL_MODE_NONE = 0, - VK_CULL_MODE_FRONT_BIT = 0x00000001, - VK_CULL_MODE_BACK_BIT = 0x00000002, - VK_CULL_MODE_FRONT_AND_BACK = 0x00000003, - VK_CULL_MODE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkCullModeFlagBits; -typedef VkFlags VkCullModeFlags; -typedef VkFlags VkPipelineMultisampleStateCreateFlags; -typedef VkFlags VkPipelineDepthStencilStateCreateFlags; -typedef VkFlags VkPipelineColorBlendStateCreateFlags; - -typedef enum VkColorComponentFlagBits { - VK_COLOR_COMPONENT_R_BIT = 0x00000001, - VK_COLOR_COMPONENT_G_BIT = 0x00000002, - VK_COLOR_COMPONENT_B_BIT = 0x00000004, - VK_COLOR_COMPONENT_A_BIT = 0x00000008, - VK_COLOR_COMPONENT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkColorComponentFlagBits; -typedef VkFlags VkColorComponentFlags; -typedef VkFlags VkPipelineDynamicStateCreateFlags; -typedef VkFlags VkPipelineLayoutCreateFlags; -typedef VkFlags VkShaderStageFlags; -typedef VkFlags VkSamplerCreateFlags; - -typedef enum VkDescriptorSetLayoutCreateFlagBits { - VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR = 0x00000001, - VK_DESCRIPTOR_SET_LAYOUT_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkDescriptorSetLayoutCreateFlagBits; -typedef VkFlags VkDescriptorSetLayoutCreateFlags; - -typedef enum VkDescriptorPoolCreateFlagBits { - VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT = 0x00000001, - VK_DESCRIPTOR_POOL_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkDescriptorPoolCreateFlagBits; -typedef VkFlags VkDescriptorPoolCreateFlags; -typedef VkFlags VkDescriptorPoolResetFlags; -typedef VkFlags VkFramebufferCreateFlags; -typedef VkFlags VkRenderPassCreateFlags; - -typedef enum VkAttachmentDescriptionFlagBits { - VK_ATTACHMENT_DESCRIPTION_MAY_ALIAS_BIT = 0x00000001, - VK_ATTACHMENT_DESCRIPTION_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkAttachmentDescriptionFlagBits; -typedef VkFlags VkAttachmentDescriptionFlags; - -typedef enum VkSubpassDescriptionFlagBits { - VK_SUBPASS_DESCRIPTION_PER_VIEW_ATTRIBUTES_BIT_NVX = 0x00000001, - VK_SUBPASS_DESCRIPTION_PER_VIEW_POSITION_X_ONLY_BIT_NVX = 0x00000002, - VK_SUBPASS_DESCRIPTION_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkSubpassDescriptionFlagBits; -typedef VkFlags VkSubpassDescriptionFlags; - -typedef enum VkAccessFlagBits { - VK_ACCESS_INDIRECT_COMMAND_READ_BIT = 0x00000001, - VK_ACCESS_INDEX_READ_BIT = 0x00000002, - VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT = 0x00000004, - VK_ACCESS_UNIFORM_READ_BIT = 0x00000008, - VK_ACCESS_INPUT_ATTACHMENT_READ_BIT = 0x00000010, - VK_ACCESS_SHADER_READ_BIT = 0x00000020, - VK_ACCESS_SHADER_WRITE_BIT = 0x00000040, - VK_ACCESS_COLOR_ATTACHMENT_READ_BIT = 0x00000080, - VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT = 0x00000100, - VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT = 0x00000200, - VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT = 0x00000400, - VK_ACCESS_TRANSFER_READ_BIT = 0x00000800, - VK_ACCESS_TRANSFER_WRITE_BIT = 0x00001000, - VK_ACCESS_HOST_READ_BIT = 0x00002000, - VK_ACCESS_HOST_WRITE_BIT = 0x00004000, - VK_ACCESS_MEMORY_READ_BIT = 0x00008000, - VK_ACCESS_MEMORY_WRITE_BIT = 
0x00010000, - VK_ACCESS_COMMAND_PROCESS_READ_BIT_NVX = 0x00020000, - VK_ACCESS_COMMAND_PROCESS_WRITE_BIT_NVX = 0x00040000, - VK_ACCESS_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT = 0x00080000, - VK_ACCESS_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkAccessFlagBits; -typedef VkFlags VkAccessFlags; - -typedef enum VkDependencyFlagBits { - VK_DEPENDENCY_BY_REGION_BIT = 0x00000001, - VK_DEPENDENCY_VIEW_LOCAL_BIT_KHX = 0x00000002, - VK_DEPENDENCY_DEVICE_GROUP_BIT_KHX = 0x00000004, - VK_DEPENDENCY_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkDependencyFlagBits; -typedef VkFlags VkDependencyFlags; - -typedef enum VkCommandPoolCreateFlagBits { - VK_COMMAND_POOL_CREATE_TRANSIENT_BIT = 0x00000001, - VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT = 0x00000002, - VK_COMMAND_POOL_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkCommandPoolCreateFlagBits; -typedef VkFlags VkCommandPoolCreateFlags; - -typedef enum VkCommandPoolResetFlagBits { - VK_COMMAND_POOL_RESET_RELEASE_RESOURCES_BIT = 0x00000001, - VK_COMMAND_POOL_RESET_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkCommandPoolResetFlagBits; -typedef VkFlags VkCommandPoolResetFlags; - -typedef enum VkCommandBufferUsageFlagBits { - VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT = 0x00000001, - VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT = 0x00000002, - VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT = 0x00000004, - VK_COMMAND_BUFFER_USAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkCommandBufferUsageFlagBits; -typedef VkFlags VkCommandBufferUsageFlags; - -typedef enum VkQueryControlFlagBits { - VK_QUERY_CONTROL_PRECISE_BIT = 0x00000001, - VK_QUERY_CONTROL_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkQueryControlFlagBits; -typedef VkFlags VkQueryControlFlags; - -typedef enum VkCommandBufferResetFlagBits { - VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT = 0x00000001, - VK_COMMAND_BUFFER_RESET_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkCommandBufferResetFlagBits; -typedef VkFlags VkCommandBufferResetFlags; - -typedef enum VkStencilFaceFlagBits { - VK_STENCIL_FACE_FRONT_BIT = 0x00000001, - VK_STENCIL_FACE_BACK_BIT = 0x00000002, - VK_STENCIL_FRONT_AND_BACK = 0x00000003, - VK_STENCIL_FACE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkStencilFaceFlagBits; -typedef VkFlags VkStencilFaceFlags; - -typedef struct VkApplicationInfo { - VkStructureType sType; - const void* pNext; - const char* pApplicationName; - uint32_t applicationVersion; - const char* pEngineName; - uint32_t engineVersion; - uint32_t apiVersion; -} VkApplicationInfo; - -typedef struct VkInstanceCreateInfo { - VkStructureType sType; - const void* pNext; - VkInstanceCreateFlags flags; - const VkApplicationInfo* pApplicationInfo; - uint32_t enabledLayerCount; - const char* const* ppEnabledLayerNames; - uint32_t enabledExtensionCount; - const char* const* ppEnabledExtensionNames; -} VkInstanceCreateInfo; - -typedef void* (VKAPI_PTR *PFN_vkAllocationFunction)( - void* pUserData, - size_t size, - size_t alignment, - VkSystemAllocationScope allocationScope); - -typedef void* (VKAPI_PTR *PFN_vkReallocationFunction)( - void* pUserData, - void* pOriginal, - size_t size, - size_t alignment, - VkSystemAllocationScope allocationScope); - -typedef void (VKAPI_PTR *PFN_vkFreeFunction)( - void* pUserData, - void* pMemory); - -typedef void (VKAPI_PTR *PFN_vkInternalAllocationNotification)( - void* pUserData, - size_t size, - VkInternalAllocationType allocationType, - VkSystemAllocationScope allocationScope); - -typedef void (VKAPI_PTR *PFN_vkInternalFreeNotification)( - void* pUserData, - size_t size, - VkInternalAllocationType allocationType, - VkSystemAllocationScope 
allocationScope); - -typedef struct VkAllocationCallbacks { - void* pUserData; - PFN_vkAllocationFunction pfnAllocation; - PFN_vkReallocationFunction pfnReallocation; - PFN_vkFreeFunction pfnFree; - PFN_vkInternalAllocationNotification pfnInternalAllocation; - PFN_vkInternalFreeNotification pfnInternalFree; -} VkAllocationCallbacks; - -typedef struct VkPhysicalDeviceFeatures { - VkBool32 robustBufferAccess; - VkBool32 fullDrawIndexUint32; - VkBool32 imageCubeArray; - VkBool32 independentBlend; - VkBool32 geometryShader; - VkBool32 tessellationShader; - VkBool32 sampleRateShading; - VkBool32 dualSrcBlend; - VkBool32 logicOp; - VkBool32 multiDrawIndirect; - VkBool32 drawIndirectFirstInstance; - VkBool32 depthClamp; - VkBool32 depthBiasClamp; - VkBool32 fillModeNonSolid; - VkBool32 depthBounds; - VkBool32 wideLines; - VkBool32 largePoints; - VkBool32 alphaToOne; - VkBool32 multiViewport; - VkBool32 samplerAnisotropy; - VkBool32 textureCompressionETC2; - VkBool32 textureCompressionASTC_LDR; - VkBool32 textureCompressionBC; - VkBool32 occlusionQueryPrecise; - VkBool32 pipelineStatisticsQuery; - VkBool32 vertexPipelineStoresAndAtomics; - VkBool32 fragmentStoresAndAtomics; - VkBool32 shaderTessellationAndGeometryPointSize; - VkBool32 shaderImageGatherExtended; - VkBool32 shaderStorageImageExtendedFormats; - VkBool32 shaderStorageImageMultisample; - VkBool32 shaderStorageImageReadWithoutFormat; - VkBool32 shaderStorageImageWriteWithoutFormat; - VkBool32 shaderUniformBufferArrayDynamicIndexing; - VkBool32 shaderSampledImageArrayDynamicIndexing; - VkBool32 shaderStorageBufferArrayDynamicIndexing; - VkBool32 shaderStorageImageArrayDynamicIndexing; - VkBool32 shaderClipDistance; - VkBool32 shaderCullDistance; - VkBool32 shaderFloat64; - VkBool32 shaderInt64; - VkBool32 shaderInt16; - VkBool32 shaderResourceResidency; - VkBool32 shaderResourceMinLod; - VkBool32 sparseBinding; - VkBool32 sparseResidencyBuffer; - VkBool32 sparseResidencyImage2D; - VkBool32 sparseResidencyImage3D; - VkBool32 sparseResidency2Samples; - VkBool32 sparseResidency4Samples; - VkBool32 sparseResidency8Samples; - VkBool32 sparseResidency16Samples; - VkBool32 sparseResidencyAliased; - VkBool32 variableMultisampleRate; - VkBool32 inheritedQueries; -} VkPhysicalDeviceFeatures; - -typedef struct VkFormatProperties { - VkFormatFeatureFlags linearTilingFeatures; - VkFormatFeatureFlags optimalTilingFeatures; - VkFormatFeatureFlags bufferFeatures; -} VkFormatProperties; - -typedef struct VkExtent3D { - uint32_t width; - uint32_t height; - uint32_t depth; -} VkExtent3D; - -typedef struct VkImageFormatProperties { - VkExtent3D maxExtent; - uint32_t maxMipLevels; - uint32_t maxArrayLayers; - VkSampleCountFlags sampleCounts; - VkDeviceSize maxResourceSize; -} VkImageFormatProperties; - -typedef struct VkPhysicalDeviceLimits { - uint32_t maxImageDimension1D; - uint32_t maxImageDimension2D; - uint32_t maxImageDimension3D; - uint32_t maxImageDimensionCube; - uint32_t maxImageArrayLayers; - uint32_t maxTexelBufferElements; - uint32_t maxUniformBufferRange; - uint32_t maxStorageBufferRange; - uint32_t maxPushConstantsSize; - uint32_t maxMemoryAllocationCount; - uint32_t maxSamplerAllocationCount; - VkDeviceSize bufferImageGranularity; - VkDeviceSize sparseAddressSpaceSize; - uint32_t maxBoundDescriptorSets; - uint32_t maxPerStageDescriptorSamplers; - uint32_t maxPerStageDescriptorUniformBuffers; - uint32_t maxPerStageDescriptorStorageBuffers; - uint32_t maxPerStageDescriptorSampledImages; - uint32_t maxPerStageDescriptorStorageImages; - uint32_t 
maxPerStageDescriptorInputAttachments; - uint32_t maxPerStageResources; - uint32_t maxDescriptorSetSamplers; - uint32_t maxDescriptorSetUniformBuffers; - uint32_t maxDescriptorSetUniformBuffersDynamic; - uint32_t maxDescriptorSetStorageBuffers; - uint32_t maxDescriptorSetStorageBuffersDynamic; - uint32_t maxDescriptorSetSampledImages; - uint32_t maxDescriptorSetStorageImages; - uint32_t maxDescriptorSetInputAttachments; - uint32_t maxVertexInputAttributes; - uint32_t maxVertexInputBindings; - uint32_t maxVertexInputAttributeOffset; - uint32_t maxVertexInputBindingStride; - uint32_t maxVertexOutputComponents; - uint32_t maxTessellationGenerationLevel; - uint32_t maxTessellationPatchSize; - uint32_t maxTessellationControlPerVertexInputComponents; - uint32_t maxTessellationControlPerVertexOutputComponents; - uint32_t maxTessellationControlPerPatchOutputComponents; - uint32_t maxTessellationControlTotalOutputComponents; - uint32_t maxTessellationEvaluationInputComponents; - uint32_t maxTessellationEvaluationOutputComponents; - uint32_t maxGeometryShaderInvocations; - uint32_t maxGeometryInputComponents; - uint32_t maxGeometryOutputComponents; - uint32_t maxGeometryOutputVertices; - uint32_t maxGeometryTotalOutputComponents; - uint32_t maxFragmentInputComponents; - uint32_t maxFragmentOutputAttachments; - uint32_t maxFragmentDualSrcAttachments; - uint32_t maxFragmentCombinedOutputResources; - uint32_t maxComputeSharedMemorySize; - uint32_t maxComputeWorkGroupCount[3]; - uint32_t maxComputeWorkGroupInvocations; - uint32_t maxComputeWorkGroupSize[3]; - uint32_t subPixelPrecisionBits; - uint32_t subTexelPrecisionBits; - uint32_t mipmapPrecisionBits; - uint32_t maxDrawIndexedIndexValue; - uint32_t maxDrawIndirectCount; - float maxSamplerLodBias; - float maxSamplerAnisotropy; - uint32_t maxViewports; - uint32_t maxViewportDimensions[2]; - float viewportBoundsRange[2]; - uint32_t viewportSubPixelBits; - size_t minMemoryMapAlignment; - VkDeviceSize minTexelBufferOffsetAlignment; - VkDeviceSize minUniformBufferOffsetAlignment; - VkDeviceSize minStorageBufferOffsetAlignment; - int32_t minTexelOffset; - uint32_t maxTexelOffset; - int32_t minTexelGatherOffset; - uint32_t maxTexelGatherOffset; - float minInterpolationOffset; - float maxInterpolationOffset; - uint32_t subPixelInterpolationOffsetBits; - uint32_t maxFramebufferWidth; - uint32_t maxFramebufferHeight; - uint32_t maxFramebufferLayers; - VkSampleCountFlags framebufferColorSampleCounts; - VkSampleCountFlags framebufferDepthSampleCounts; - VkSampleCountFlags framebufferStencilSampleCounts; - VkSampleCountFlags framebufferNoAttachmentsSampleCounts; - uint32_t maxColorAttachments; - VkSampleCountFlags sampledImageColorSampleCounts; - VkSampleCountFlags sampledImageIntegerSampleCounts; - VkSampleCountFlags sampledImageDepthSampleCounts; - VkSampleCountFlags sampledImageStencilSampleCounts; - VkSampleCountFlags storageImageSampleCounts; - uint32_t maxSampleMaskWords; - VkBool32 timestampComputeAndGraphics; - float timestampPeriod; - uint32_t maxClipDistances; - uint32_t maxCullDistances; - uint32_t maxCombinedClipAndCullDistances; - uint32_t discreteQueuePriorities; - float pointSizeRange[2]; - float lineWidthRange[2]; - float pointSizeGranularity; - float lineWidthGranularity; - VkBool32 strictLines; - VkBool32 standardSampleLocations; - VkDeviceSize optimalBufferCopyOffsetAlignment; - VkDeviceSize optimalBufferCopyRowPitchAlignment; - VkDeviceSize nonCoherentAtomSize; -} VkPhysicalDeviceLimits; - -typedef struct VkPhysicalDeviceSparseProperties { - 
VkBool32 residencyStandard2DBlockShape; - VkBool32 residencyStandard2DMultisampleBlockShape; - VkBool32 residencyStandard3DBlockShape; - VkBool32 residencyAlignedMipSize; - VkBool32 residencyNonResidentStrict; -} VkPhysicalDeviceSparseProperties; - -typedef struct VkPhysicalDeviceProperties { - uint32_t apiVersion; - uint32_t driverVersion; - uint32_t vendorID; - uint32_t deviceID; - VkPhysicalDeviceType deviceType; - char deviceName[VK_MAX_PHYSICAL_DEVICE_NAME_SIZE]; - uint8_t pipelineCacheUUID[VK_UUID_SIZE]; - VkPhysicalDeviceLimits limits; - VkPhysicalDeviceSparseProperties sparseProperties; -} VkPhysicalDeviceProperties; - -typedef struct VkQueueFamilyProperties { - VkQueueFlags queueFlags; - uint32_t queueCount; - uint32_t timestampValidBits; - VkExtent3D minImageTransferGranularity; -} VkQueueFamilyProperties; - -typedef struct VkMemoryType { - VkMemoryPropertyFlags propertyFlags; - uint32_t heapIndex; -} VkMemoryType; - -typedef struct VkMemoryHeap { - VkDeviceSize size; - VkMemoryHeapFlags flags; -} VkMemoryHeap; - -typedef struct VkPhysicalDeviceMemoryProperties { - uint32_t memoryTypeCount; - VkMemoryType memoryTypes[VK_MAX_MEMORY_TYPES]; - uint32_t memoryHeapCount; - VkMemoryHeap memoryHeaps[VK_MAX_MEMORY_HEAPS]; -} VkPhysicalDeviceMemoryProperties; - -typedef void (VKAPI_PTR *PFN_vkVoidFunction)(void); -typedef struct VkDeviceQueueCreateInfo { - VkStructureType sType; - const void* pNext; - VkDeviceQueueCreateFlags flags; - uint32_t queueFamilyIndex; - uint32_t queueCount; - const float* pQueuePriorities; -} VkDeviceQueueCreateInfo; - -typedef struct VkDeviceCreateInfo { - VkStructureType sType; - const void* pNext; - VkDeviceCreateFlags flags; - uint32_t queueCreateInfoCount; - const VkDeviceQueueCreateInfo* pQueueCreateInfos; - uint32_t enabledLayerCount; - const char* const* ppEnabledLayerNames; - uint32_t enabledExtensionCount; - const char* const* ppEnabledExtensionNames; - const VkPhysicalDeviceFeatures* pEnabledFeatures; -} VkDeviceCreateInfo; - -typedef struct VkExtensionProperties { - char extensionName[VK_MAX_EXTENSION_NAME_SIZE]; - uint32_t specVersion; -} VkExtensionProperties; - -typedef struct VkLayerProperties { - char layerName[VK_MAX_EXTENSION_NAME_SIZE]; - uint32_t specVersion; - uint32_t implementationVersion; - char description[VK_MAX_DESCRIPTION_SIZE]; -} VkLayerProperties; - -typedef struct VkSubmitInfo { - VkStructureType sType; - const void* pNext; - uint32_t waitSemaphoreCount; - const VkSemaphore* pWaitSemaphores; - const VkPipelineStageFlags* pWaitDstStageMask; - uint32_t commandBufferCount; - const VkCommandBuffer* pCommandBuffers; - uint32_t signalSemaphoreCount; - const VkSemaphore* pSignalSemaphores; -} VkSubmitInfo; - -typedef struct VkMemoryAllocateInfo { - VkStructureType sType; - const void* pNext; - VkDeviceSize allocationSize; - uint32_t memoryTypeIndex; -} VkMemoryAllocateInfo; - -typedef struct VkMappedMemoryRange { - VkStructureType sType; - const void* pNext; - VkDeviceMemory memory; - VkDeviceSize offset; - VkDeviceSize size; -} VkMappedMemoryRange; - -typedef struct VkMemoryRequirements { - VkDeviceSize size; - VkDeviceSize alignment; - uint32_t memoryTypeBits; -} VkMemoryRequirements; - -typedef struct VkSparseImageFormatProperties { - VkImageAspectFlags aspectMask; - VkExtent3D imageGranularity; - VkSparseImageFormatFlags flags; -} VkSparseImageFormatProperties; - -typedef struct VkSparseImageMemoryRequirements { - VkSparseImageFormatProperties formatProperties; - uint32_t imageMipTailFirstLod; - VkDeviceSize imageMipTailSize; - 
VkDeviceSize imageMipTailOffset; - VkDeviceSize imageMipTailStride; -} VkSparseImageMemoryRequirements; - -typedef struct VkSparseMemoryBind { - VkDeviceSize resourceOffset; - VkDeviceSize size; - VkDeviceMemory memory; - VkDeviceSize memoryOffset; - VkSparseMemoryBindFlags flags; -} VkSparseMemoryBind; - -typedef struct VkSparseBufferMemoryBindInfo { - VkBuffer buffer; - uint32_t bindCount; - const VkSparseMemoryBind* pBinds; -} VkSparseBufferMemoryBindInfo; - -typedef struct VkSparseImageOpaqueMemoryBindInfo { - VkImage image; - uint32_t bindCount; - const VkSparseMemoryBind* pBinds; -} VkSparseImageOpaqueMemoryBindInfo; - -typedef struct VkImageSubresource { - VkImageAspectFlags aspectMask; - uint32_t mipLevel; - uint32_t arrayLayer; -} VkImageSubresource; - -typedef struct VkOffset3D { - int32_t x; - int32_t y; - int32_t z; -} VkOffset3D; - -typedef struct VkSparseImageMemoryBind { - VkImageSubresource subresource; - VkOffset3D offset; - VkExtent3D extent; - VkDeviceMemory memory; - VkDeviceSize memoryOffset; - VkSparseMemoryBindFlags flags; -} VkSparseImageMemoryBind; - -typedef struct VkSparseImageMemoryBindInfo { - VkImage image; - uint32_t bindCount; - const VkSparseImageMemoryBind* pBinds; -} VkSparseImageMemoryBindInfo; - -typedef struct VkBindSparseInfo { - VkStructureType sType; - const void* pNext; - uint32_t waitSemaphoreCount; - const VkSemaphore* pWaitSemaphores; - uint32_t bufferBindCount; - const VkSparseBufferMemoryBindInfo* pBufferBinds; - uint32_t imageOpaqueBindCount; - const VkSparseImageOpaqueMemoryBindInfo* pImageOpaqueBinds; - uint32_t imageBindCount; - const VkSparseImageMemoryBindInfo* pImageBinds; - uint32_t signalSemaphoreCount; - const VkSemaphore* pSignalSemaphores; -} VkBindSparseInfo; - -typedef struct VkFenceCreateInfo { - VkStructureType sType; - const void* pNext; - VkFenceCreateFlags flags; -} VkFenceCreateInfo; - -typedef struct VkSemaphoreCreateInfo { - VkStructureType sType; - const void* pNext; - VkSemaphoreCreateFlags flags; -} VkSemaphoreCreateInfo; - -typedef struct VkEventCreateInfo { - VkStructureType sType; - const void* pNext; - VkEventCreateFlags flags; -} VkEventCreateInfo; - -typedef struct VkQueryPoolCreateInfo { - VkStructureType sType; - const void* pNext; - VkQueryPoolCreateFlags flags; - VkQueryType queryType; - uint32_t queryCount; - VkQueryPipelineStatisticFlags pipelineStatistics; -} VkQueryPoolCreateInfo; - -typedef struct VkBufferCreateInfo { - VkStructureType sType; - const void* pNext; - VkBufferCreateFlags flags; - VkDeviceSize size; - VkBufferUsageFlags usage; - VkSharingMode sharingMode; - uint32_t queueFamilyIndexCount; - const uint32_t* pQueueFamilyIndices; -} VkBufferCreateInfo; - -typedef struct VkBufferViewCreateInfo { - VkStructureType sType; - const void* pNext; - VkBufferViewCreateFlags flags; - VkBuffer buffer; - VkFormat format; - VkDeviceSize offset; - VkDeviceSize range; -} VkBufferViewCreateInfo; - -typedef struct VkImageCreateInfo { - VkStructureType sType; - const void* pNext; - VkImageCreateFlags flags; - VkImageType imageType; - VkFormat format; - VkExtent3D extent; - uint32_t mipLevels; - uint32_t arrayLayers; - VkSampleCountFlagBits samples; - VkImageTiling tiling; - VkImageUsageFlags usage; - VkSharingMode sharingMode; - uint32_t queueFamilyIndexCount; - const uint32_t* pQueueFamilyIndices; - VkImageLayout initialLayout; -} VkImageCreateInfo; - -typedef struct VkSubresourceLayout { - VkDeviceSize offset; - VkDeviceSize size; - VkDeviceSize rowPitch; - VkDeviceSize arrayPitch; - VkDeviceSize depthPitch; 
-} VkSubresourceLayout; - -typedef struct VkComponentMapping { - VkComponentSwizzle r; - VkComponentSwizzle g; - VkComponentSwizzle b; - VkComponentSwizzle a; -} VkComponentMapping; - -typedef struct VkImageSubresourceRange { - VkImageAspectFlags aspectMask; - uint32_t baseMipLevel; - uint32_t levelCount; - uint32_t baseArrayLayer; - uint32_t layerCount; -} VkImageSubresourceRange; - -typedef struct VkImageViewCreateInfo { - VkStructureType sType; - const void* pNext; - VkImageViewCreateFlags flags; - VkImage image; - VkImageViewType viewType; - VkFormat format; - VkComponentMapping components; - VkImageSubresourceRange subresourceRange; -} VkImageViewCreateInfo; - -typedef struct VkShaderModuleCreateInfo { - VkStructureType sType; - const void* pNext; - VkShaderModuleCreateFlags flags; - size_t codeSize; - const uint32_t* pCode; -} VkShaderModuleCreateInfo; - -typedef struct VkPipelineCacheCreateInfo { - VkStructureType sType; - const void* pNext; - VkPipelineCacheCreateFlags flags; - size_t initialDataSize; - const void* pInitialData; -} VkPipelineCacheCreateInfo; - -typedef struct VkSpecializationMapEntry { - uint32_t constantID; - uint32_t offset; - size_t size; -} VkSpecializationMapEntry; - -typedef struct VkSpecializationInfo { - uint32_t mapEntryCount; - const VkSpecializationMapEntry* pMapEntries; - size_t dataSize; - const void* pData; -} VkSpecializationInfo; - -typedef struct VkPipelineShaderStageCreateInfo { - VkStructureType sType; - const void* pNext; - VkPipelineShaderStageCreateFlags flags; - VkShaderStageFlagBits stage; - VkShaderModule module; - const char* pName; - const VkSpecializationInfo* pSpecializationInfo; -} VkPipelineShaderStageCreateInfo; - -typedef struct VkVertexInputBindingDescription { - uint32_t binding; - uint32_t stride; - VkVertexInputRate inputRate; -} VkVertexInputBindingDescription; - -typedef struct VkVertexInputAttributeDescription { - uint32_t location; - uint32_t binding; - VkFormat format; - uint32_t offset; -} VkVertexInputAttributeDescription; - -typedef struct VkPipelineVertexInputStateCreateInfo { - VkStructureType sType; - const void* pNext; - VkPipelineVertexInputStateCreateFlags flags; - uint32_t vertexBindingDescriptionCount; - const VkVertexInputBindingDescription* pVertexBindingDescriptions; - uint32_t vertexAttributeDescriptionCount; - const VkVertexInputAttributeDescription* pVertexAttributeDescriptions; -} VkPipelineVertexInputStateCreateInfo; - -typedef struct VkPipelineInputAssemblyStateCreateInfo { - VkStructureType sType; - const void* pNext; - VkPipelineInputAssemblyStateCreateFlags flags; - VkPrimitiveTopology topology; - VkBool32 primitiveRestartEnable; -} VkPipelineInputAssemblyStateCreateInfo; - -typedef struct VkPipelineTessellationStateCreateInfo { - VkStructureType sType; - const void* pNext; - VkPipelineTessellationStateCreateFlags flags; - uint32_t patchControlPoints; -} VkPipelineTessellationStateCreateInfo; - -typedef struct VkViewport { - float x; - float y; - float width; - float height; - float minDepth; - float maxDepth; -} VkViewport; - -typedef struct VkOffset2D { - int32_t x; - int32_t y; -} VkOffset2D; - -typedef struct VkExtent2D { - uint32_t width; - uint32_t height; -} VkExtent2D; - -typedef struct VkRect2D { - VkOffset2D offset; - VkExtent2D extent; -} VkRect2D; - -typedef struct VkPipelineViewportStateCreateInfo { - VkStructureType sType; - const void* pNext; - VkPipelineViewportStateCreateFlags flags; - uint32_t viewportCount; - const VkViewport* pViewports; - uint32_t scissorCount; - const VkRect2D* 
pScissors; -} VkPipelineViewportStateCreateInfo; - -typedef struct VkPipelineRasterizationStateCreateInfo { - VkStructureType sType; - const void* pNext; - VkPipelineRasterizationStateCreateFlags flags; - VkBool32 depthClampEnable; - VkBool32 rasterizerDiscardEnable; - VkPolygonMode polygonMode; - VkCullModeFlags cullMode; - VkFrontFace frontFace; - VkBool32 depthBiasEnable; - float depthBiasConstantFactor; - float depthBiasClamp; - float depthBiasSlopeFactor; - float lineWidth; -} VkPipelineRasterizationStateCreateInfo; - -typedef struct VkPipelineMultisampleStateCreateInfo { - VkStructureType sType; - const void* pNext; - VkPipelineMultisampleStateCreateFlags flags; - VkSampleCountFlagBits rasterizationSamples; - VkBool32 sampleShadingEnable; - float minSampleShading; - const VkSampleMask* pSampleMask; - VkBool32 alphaToCoverageEnable; - VkBool32 alphaToOneEnable; -} VkPipelineMultisampleStateCreateInfo; - -typedef struct VkStencilOpState { - VkStencilOp failOp; - VkStencilOp passOp; - VkStencilOp depthFailOp; - VkCompareOp compareOp; - uint32_t compareMask; - uint32_t writeMask; - uint32_t reference; -} VkStencilOpState; - -typedef struct VkPipelineDepthStencilStateCreateInfo { - VkStructureType sType; - const void* pNext; - VkPipelineDepthStencilStateCreateFlags flags; - VkBool32 depthTestEnable; - VkBool32 depthWriteEnable; - VkCompareOp depthCompareOp; - VkBool32 depthBoundsTestEnable; - VkBool32 stencilTestEnable; - VkStencilOpState front; - VkStencilOpState back; - float minDepthBounds; - float maxDepthBounds; -} VkPipelineDepthStencilStateCreateInfo; - -typedef struct VkPipelineColorBlendAttachmentState { - VkBool32 blendEnable; - VkBlendFactor srcColorBlendFactor; - VkBlendFactor dstColorBlendFactor; - VkBlendOp colorBlendOp; - VkBlendFactor srcAlphaBlendFactor; - VkBlendFactor dstAlphaBlendFactor; - VkBlendOp alphaBlendOp; - VkColorComponentFlags colorWriteMask; -} VkPipelineColorBlendAttachmentState; - -typedef struct VkPipelineColorBlendStateCreateInfo { - VkStructureType sType; - const void* pNext; - VkPipelineColorBlendStateCreateFlags flags; - VkBool32 logicOpEnable; - VkLogicOp logicOp; - uint32_t attachmentCount; - const VkPipelineColorBlendAttachmentState* pAttachments; - float blendConstants[4]; -} VkPipelineColorBlendStateCreateInfo; - -typedef struct VkPipelineDynamicStateCreateInfo { - VkStructureType sType; - const void* pNext; - VkPipelineDynamicStateCreateFlags flags; - uint32_t dynamicStateCount; - const VkDynamicState* pDynamicStates; -} VkPipelineDynamicStateCreateInfo; - -typedef struct VkGraphicsPipelineCreateInfo { - VkStructureType sType; - const void* pNext; - VkPipelineCreateFlags flags; - uint32_t stageCount; - const VkPipelineShaderStageCreateInfo* pStages; - const VkPipelineVertexInputStateCreateInfo* pVertexInputState; - const VkPipelineInputAssemblyStateCreateInfo* pInputAssemblyState; - const VkPipelineTessellationStateCreateInfo* pTessellationState; - const VkPipelineViewportStateCreateInfo* pViewportState; - const VkPipelineRasterizationStateCreateInfo* pRasterizationState; - const VkPipelineMultisampleStateCreateInfo* pMultisampleState; - const VkPipelineDepthStencilStateCreateInfo* pDepthStencilState; - const VkPipelineColorBlendStateCreateInfo* pColorBlendState; - const VkPipelineDynamicStateCreateInfo* pDynamicState; - VkPipelineLayout layout; - VkRenderPass renderPass; - uint32_t subpass; - VkPipeline basePipelineHandle; - int32_t basePipelineIndex; -} VkGraphicsPipelineCreateInfo; - -typedef struct VkComputePipelineCreateInfo { - 
VkStructureType sType; - const void* pNext; - VkPipelineCreateFlags flags; - VkPipelineShaderStageCreateInfo stage; - VkPipelineLayout layout; - VkPipeline basePipelineHandle; - int32_t basePipelineIndex; -} VkComputePipelineCreateInfo; - -typedef struct VkPushConstantRange { - VkShaderStageFlags stageFlags; - uint32_t offset; - uint32_t size; -} VkPushConstantRange; - -typedef struct VkPipelineLayoutCreateInfo { - VkStructureType sType; - const void* pNext; - VkPipelineLayoutCreateFlags flags; - uint32_t setLayoutCount; - const VkDescriptorSetLayout* pSetLayouts; - uint32_t pushConstantRangeCount; - const VkPushConstantRange* pPushConstantRanges; -} VkPipelineLayoutCreateInfo; - -typedef struct VkSamplerCreateInfo { - VkStructureType sType; - const void* pNext; - VkSamplerCreateFlags flags; - VkFilter magFilter; - VkFilter minFilter; - VkSamplerMipmapMode mipmapMode; - VkSamplerAddressMode addressModeU; - VkSamplerAddressMode addressModeV; - VkSamplerAddressMode addressModeW; - float mipLodBias; - VkBool32 anisotropyEnable; - float maxAnisotropy; - VkBool32 compareEnable; - VkCompareOp compareOp; - float minLod; - float maxLod; - VkBorderColor borderColor; - VkBool32 unnormalizedCoordinates; -} VkSamplerCreateInfo; - -typedef struct VkDescriptorSetLayoutBinding { - uint32_t binding; - VkDescriptorType descriptorType; - uint32_t descriptorCount; - VkShaderStageFlags stageFlags; - const VkSampler* pImmutableSamplers; -} VkDescriptorSetLayoutBinding; - -typedef struct VkDescriptorSetLayoutCreateInfo { - VkStructureType sType; - const void* pNext; - VkDescriptorSetLayoutCreateFlags flags; - uint32_t bindingCount; - const VkDescriptorSetLayoutBinding* pBindings; -} VkDescriptorSetLayoutCreateInfo; - -typedef struct VkDescriptorPoolSize { - VkDescriptorType type; - uint32_t descriptorCount; -} VkDescriptorPoolSize; - -typedef struct VkDescriptorPoolCreateInfo { - VkStructureType sType; - const void* pNext; - VkDescriptorPoolCreateFlags flags; - uint32_t maxSets; - uint32_t poolSizeCount; - const VkDescriptorPoolSize* pPoolSizes; -} VkDescriptorPoolCreateInfo; - -typedef struct VkDescriptorSetAllocateInfo { - VkStructureType sType; - const void* pNext; - VkDescriptorPool descriptorPool; - uint32_t descriptorSetCount; - const VkDescriptorSetLayout* pSetLayouts; -} VkDescriptorSetAllocateInfo; - -typedef struct VkDescriptorImageInfo { - VkSampler sampler; - VkImageView imageView; - VkImageLayout imageLayout; -} VkDescriptorImageInfo; - -typedef struct VkDescriptorBufferInfo { - VkBuffer buffer; - VkDeviceSize offset; - VkDeviceSize range; -} VkDescriptorBufferInfo; - -typedef struct VkWriteDescriptorSet { - VkStructureType sType; - const void* pNext; - VkDescriptorSet dstSet; - uint32_t dstBinding; - uint32_t dstArrayElement; - uint32_t descriptorCount; - VkDescriptorType descriptorType; - const VkDescriptorImageInfo* pImageInfo; - const VkDescriptorBufferInfo* pBufferInfo; - const VkBufferView* pTexelBufferView; -} VkWriteDescriptorSet; - -typedef struct VkCopyDescriptorSet { - VkStructureType sType; - const void* pNext; - VkDescriptorSet srcSet; - uint32_t srcBinding; - uint32_t srcArrayElement; - VkDescriptorSet dstSet; - uint32_t dstBinding; - uint32_t dstArrayElement; - uint32_t descriptorCount; -} VkCopyDescriptorSet; - -typedef struct VkFramebufferCreateInfo { - VkStructureType sType; - const void* pNext; - VkFramebufferCreateFlags flags; - VkRenderPass renderPass; - uint32_t attachmentCount; - const VkImageView* pAttachments; - uint32_t width; - uint32_t height; - uint32_t layers; -} 
VkFramebufferCreateInfo; - -typedef struct VkAttachmentDescription { - VkAttachmentDescriptionFlags flags; - VkFormat format; - VkSampleCountFlagBits samples; - VkAttachmentLoadOp loadOp; - VkAttachmentStoreOp storeOp; - VkAttachmentLoadOp stencilLoadOp; - VkAttachmentStoreOp stencilStoreOp; - VkImageLayout initialLayout; - VkImageLayout finalLayout; -} VkAttachmentDescription; - -typedef struct VkAttachmentReference { - uint32_t attachment; - VkImageLayout layout; -} VkAttachmentReference; - -typedef struct VkSubpassDescription { - VkSubpassDescriptionFlags flags; - VkPipelineBindPoint pipelineBindPoint; - uint32_t inputAttachmentCount; - const VkAttachmentReference* pInputAttachments; - uint32_t colorAttachmentCount; - const VkAttachmentReference* pColorAttachments; - const VkAttachmentReference* pResolveAttachments; - const VkAttachmentReference* pDepthStencilAttachment; - uint32_t preserveAttachmentCount; - const uint32_t* pPreserveAttachments; -} VkSubpassDescription; - -typedef struct VkSubpassDependency { - uint32_t srcSubpass; - uint32_t dstSubpass; - VkPipelineStageFlags srcStageMask; - VkPipelineStageFlags dstStageMask; - VkAccessFlags srcAccessMask; - VkAccessFlags dstAccessMask; - VkDependencyFlags dependencyFlags; -} VkSubpassDependency; - -typedef struct VkRenderPassCreateInfo { - VkStructureType sType; - const void* pNext; - VkRenderPassCreateFlags flags; - uint32_t attachmentCount; - const VkAttachmentDescription* pAttachments; - uint32_t subpassCount; - const VkSubpassDescription* pSubpasses; - uint32_t dependencyCount; - const VkSubpassDependency* pDependencies; -} VkRenderPassCreateInfo; - -typedef struct VkCommandPoolCreateInfo { - VkStructureType sType; - const void* pNext; - VkCommandPoolCreateFlags flags; - uint32_t queueFamilyIndex; -} VkCommandPoolCreateInfo; - -typedef struct VkCommandBufferAllocateInfo { - VkStructureType sType; - const void* pNext; - VkCommandPool commandPool; - VkCommandBufferLevel level; - uint32_t commandBufferCount; -} VkCommandBufferAllocateInfo; - -typedef struct VkCommandBufferInheritanceInfo { - VkStructureType sType; - const void* pNext; - VkRenderPass renderPass; - uint32_t subpass; - VkFramebuffer framebuffer; - VkBool32 occlusionQueryEnable; - VkQueryControlFlags queryFlags; - VkQueryPipelineStatisticFlags pipelineStatistics; -} VkCommandBufferInheritanceInfo; - -typedef struct VkCommandBufferBeginInfo { - VkStructureType sType; - const void* pNext; - VkCommandBufferUsageFlags flags; - const VkCommandBufferInheritanceInfo* pInheritanceInfo; -} VkCommandBufferBeginInfo; - -typedef struct VkBufferCopy { - VkDeviceSize srcOffset; - VkDeviceSize dstOffset; - VkDeviceSize size; -} VkBufferCopy; - -typedef struct VkImageSubresourceLayers { - VkImageAspectFlags aspectMask; - uint32_t mipLevel; - uint32_t baseArrayLayer; - uint32_t layerCount; -} VkImageSubresourceLayers; - -typedef struct VkImageCopy { - VkImageSubresourceLayers srcSubresource; - VkOffset3D srcOffset; - VkImageSubresourceLayers dstSubresource; - VkOffset3D dstOffset; - VkExtent3D extent; -} VkImageCopy; - -typedef struct VkImageBlit { - VkImageSubresourceLayers srcSubresource; - VkOffset3D srcOffsets[2]; - VkImageSubresourceLayers dstSubresource; - VkOffset3D dstOffsets[2]; -} VkImageBlit; - -typedef struct VkBufferImageCopy { - VkDeviceSize bufferOffset; - uint32_t bufferRowLength; - uint32_t bufferImageHeight; - VkImageSubresourceLayers imageSubresource; - VkOffset3D imageOffset; - VkExtent3D imageExtent; -} VkBufferImageCopy; - -typedef union VkClearColorValue { - float 
float32[4]; - int32_t int32[4]; - uint32_t uint32[4]; -} VkClearColorValue; - -typedef struct VkClearDepthStencilValue { - float depth; - uint32_t stencil; -} VkClearDepthStencilValue; - -typedef union VkClearValue { - VkClearColorValue color; - VkClearDepthStencilValue depthStencil; -} VkClearValue; - -typedef struct VkClearAttachment { - VkImageAspectFlags aspectMask; - uint32_t colorAttachment; - VkClearValue clearValue; -} VkClearAttachment; - -typedef struct VkClearRect { - VkRect2D rect; - uint32_t baseArrayLayer; - uint32_t layerCount; -} VkClearRect; - -typedef struct VkImageResolve { - VkImageSubresourceLayers srcSubresource; - VkOffset3D srcOffset; - VkImageSubresourceLayers dstSubresource; - VkOffset3D dstOffset; - VkExtent3D extent; -} VkImageResolve; - -typedef struct VkMemoryBarrier { - VkStructureType sType; - const void* pNext; - VkAccessFlags srcAccessMask; - VkAccessFlags dstAccessMask; -} VkMemoryBarrier; - -typedef struct VkBufferMemoryBarrier { - VkStructureType sType; - const void* pNext; - VkAccessFlags srcAccessMask; - VkAccessFlags dstAccessMask; - uint32_t srcQueueFamilyIndex; - uint32_t dstQueueFamilyIndex; - VkBuffer buffer; - VkDeviceSize offset; - VkDeviceSize size; -} VkBufferMemoryBarrier; - -typedef struct VkImageMemoryBarrier { - VkStructureType sType; - const void* pNext; - VkAccessFlags srcAccessMask; - VkAccessFlags dstAccessMask; - VkImageLayout oldLayout; - VkImageLayout newLayout; - uint32_t srcQueueFamilyIndex; - uint32_t dstQueueFamilyIndex; - VkImage image; - VkImageSubresourceRange subresourceRange; -} VkImageMemoryBarrier; - -typedef struct VkRenderPassBeginInfo { - VkStructureType sType; - const void* pNext; - VkRenderPass renderPass; - VkFramebuffer framebuffer; - VkRect2D renderArea; - uint32_t clearValueCount; - const VkClearValue* pClearValues; -} VkRenderPassBeginInfo; - -typedef struct VkDispatchIndirectCommand { - uint32_t x; - uint32_t y; - uint32_t z; -} VkDispatchIndirectCommand; - -typedef struct VkDrawIndexedIndirectCommand { - uint32_t indexCount; - uint32_t instanceCount; - uint32_t firstIndex; - int32_t vertexOffset; - uint32_t firstInstance; -} VkDrawIndexedIndirectCommand; - -typedef struct VkDrawIndirectCommand { - uint32_t vertexCount; - uint32_t instanceCount; - uint32_t firstVertex; - uint32_t firstInstance; -} VkDrawIndirectCommand; - - -typedef VkResult (VKAPI_PTR *PFN_vkCreateInstance)(const VkInstanceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkInstance* pInstance); -typedef void (VKAPI_PTR *PFN_vkDestroyInstance)(VkInstance instance, const VkAllocationCallbacks* pAllocator); -typedef VkResult (VKAPI_PTR *PFN_vkEnumeratePhysicalDevices)(VkInstance instance, uint32_t* pPhysicalDeviceCount, VkPhysicalDevice* pPhysicalDevices); -typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceFeatures)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures* pFeatures); -typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceFormatProperties)(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties* pFormatProperties); -typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceImageFormatProperties)(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkImageFormatProperties* pImageFormatProperties); -typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceProperties)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties* pProperties); -typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceQueueFamilyProperties)(VkPhysicalDevice 
physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties* pQueueFamilyProperties); -typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceMemoryProperties)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties* pMemoryProperties); -typedef PFN_vkVoidFunction (VKAPI_PTR *PFN_vkGetInstanceProcAddr)(VkInstance instance, const char* pName); -typedef PFN_vkVoidFunction (VKAPI_PTR *PFN_vkGetDeviceProcAddr)(VkDevice device, const char* pName); -typedef VkResult (VKAPI_PTR *PFN_vkCreateDevice)(VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDevice* pDevice); -typedef void (VKAPI_PTR *PFN_vkDestroyDevice)(VkDevice device, const VkAllocationCallbacks* pAllocator); -typedef VkResult (VKAPI_PTR *PFN_vkEnumerateInstanceExtensionProperties)(const char* pLayerName, uint32_t* pPropertyCount, VkExtensionProperties* pProperties); -typedef VkResult (VKAPI_PTR *PFN_vkEnumerateDeviceExtensionProperties)(VkPhysicalDevice physicalDevice, const char* pLayerName, uint32_t* pPropertyCount, VkExtensionProperties* pProperties); -typedef VkResult (VKAPI_PTR *PFN_vkEnumerateInstanceLayerProperties)(uint32_t* pPropertyCount, VkLayerProperties* pProperties); -typedef VkResult (VKAPI_PTR *PFN_vkEnumerateDeviceLayerProperties)(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkLayerProperties* pProperties); -typedef void (VKAPI_PTR *PFN_vkGetDeviceQueue)(VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex, VkQueue* pQueue); -typedef VkResult (VKAPI_PTR *PFN_vkQueueSubmit)(VkQueue queue, uint32_t submitCount, const VkSubmitInfo* pSubmits, VkFence fence); -typedef VkResult (VKAPI_PTR *PFN_vkQueueWaitIdle)(VkQueue queue); -typedef VkResult (VKAPI_PTR *PFN_vkDeviceWaitIdle)(VkDevice device); -typedef VkResult (VKAPI_PTR *PFN_vkAllocateMemory)(VkDevice device, const VkMemoryAllocateInfo* pAllocateInfo, const VkAllocationCallbacks* pAllocator, VkDeviceMemory* pMemory); -typedef void (VKAPI_PTR *PFN_vkFreeMemory)(VkDevice device, VkDeviceMemory memory, const VkAllocationCallbacks* pAllocator); -typedef VkResult (VKAPI_PTR *PFN_vkMapMemory)(VkDevice device, VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags, void** ppData); -typedef void (VKAPI_PTR *PFN_vkUnmapMemory)(VkDevice device, VkDeviceMemory memory); -typedef VkResult (VKAPI_PTR *PFN_vkFlushMappedMemoryRanges)(VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange* pMemoryRanges); -typedef VkResult (VKAPI_PTR *PFN_vkInvalidateMappedMemoryRanges)(VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange* pMemoryRanges); -typedef void (VKAPI_PTR *PFN_vkGetDeviceMemoryCommitment)(VkDevice device, VkDeviceMemory memory, VkDeviceSize* pCommittedMemoryInBytes); -typedef VkResult (VKAPI_PTR *PFN_vkBindBufferMemory)(VkDevice device, VkBuffer buffer, VkDeviceMemory memory, VkDeviceSize memoryOffset); -typedef VkResult (VKAPI_PTR *PFN_vkBindImageMemory)(VkDevice device, VkImage image, VkDeviceMemory memory, VkDeviceSize memoryOffset); -typedef void (VKAPI_PTR *PFN_vkGetBufferMemoryRequirements)(VkDevice device, VkBuffer buffer, VkMemoryRequirements* pMemoryRequirements); -typedef void (VKAPI_PTR *PFN_vkGetImageMemoryRequirements)(VkDevice device, VkImage image, VkMemoryRequirements* pMemoryRequirements); -typedef void (VKAPI_PTR *PFN_vkGetImageSparseMemoryRequirements)(VkDevice device, VkImage image, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements* pSparseMemoryRequirements); -typedef 
void (VKAPI_PTR *PFN_vkGetPhysicalDeviceSparseImageFormatProperties)(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkSampleCountFlagBits samples, VkImageUsageFlags usage, VkImageTiling tiling, uint32_t* pPropertyCount, VkSparseImageFormatProperties* pProperties); -typedef VkResult (VKAPI_PTR *PFN_vkQueueBindSparse)(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo* pBindInfo, VkFence fence); -typedef VkResult (VKAPI_PTR *PFN_vkCreateFence)(VkDevice device, const VkFenceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence); -typedef void (VKAPI_PTR *PFN_vkDestroyFence)(VkDevice device, VkFence fence, const VkAllocationCallbacks* pAllocator); -typedef VkResult (VKAPI_PTR *PFN_vkResetFences)(VkDevice device, uint32_t fenceCount, const VkFence* pFences); -typedef VkResult (VKAPI_PTR *PFN_vkGetFenceStatus)(VkDevice device, VkFence fence); -typedef VkResult (VKAPI_PTR *PFN_vkWaitForFences)(VkDevice device, uint32_t fenceCount, const VkFence* pFences, VkBool32 waitAll, uint64_t timeout); -typedef VkResult (VKAPI_PTR *PFN_vkCreateSemaphore)(VkDevice device, const VkSemaphoreCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSemaphore* pSemaphore); -typedef void (VKAPI_PTR *PFN_vkDestroySemaphore)(VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks* pAllocator); -typedef VkResult (VKAPI_PTR *PFN_vkCreateEvent)(VkDevice device, const VkEventCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkEvent* pEvent); -typedef void (VKAPI_PTR *PFN_vkDestroyEvent)(VkDevice device, VkEvent event, const VkAllocationCallbacks* pAllocator); -typedef VkResult (VKAPI_PTR *PFN_vkGetEventStatus)(VkDevice device, VkEvent event); -typedef VkResult (VKAPI_PTR *PFN_vkSetEvent)(VkDevice device, VkEvent event); -typedef VkResult (VKAPI_PTR *PFN_vkResetEvent)(VkDevice device, VkEvent event); -typedef VkResult (VKAPI_PTR *PFN_vkCreateQueryPool)(VkDevice device, const VkQueryPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkQueryPool* pQueryPool); -typedef void (VKAPI_PTR *PFN_vkDestroyQueryPool)(VkDevice device, VkQueryPool queryPool, const VkAllocationCallbacks* pAllocator); -typedef VkResult (VKAPI_PTR *PFN_vkGetQueryPoolResults)(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void* pData, VkDeviceSize stride, VkQueryResultFlags flags); -typedef VkResult (VKAPI_PTR *PFN_vkCreateBuffer)(VkDevice device, const VkBufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBuffer* pBuffer); -typedef void (VKAPI_PTR *PFN_vkDestroyBuffer)(VkDevice device, VkBuffer buffer, const VkAllocationCallbacks* pAllocator); -typedef VkResult (VKAPI_PTR *PFN_vkCreateBufferView)(VkDevice device, const VkBufferViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBufferView* pView); -typedef void (VKAPI_PTR *PFN_vkDestroyBufferView)(VkDevice device, VkBufferView bufferView, const VkAllocationCallbacks* pAllocator); -typedef VkResult (VKAPI_PTR *PFN_vkCreateImage)(VkDevice device, const VkImageCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkImage* pImage); -typedef void (VKAPI_PTR *PFN_vkDestroyImage)(VkDevice device, VkImage image, const VkAllocationCallbacks* pAllocator); -typedef void (VKAPI_PTR *PFN_vkGetImageSubresourceLayout)(VkDevice device, VkImage image, const VkImageSubresource* pSubresource, VkSubresourceLayout* pLayout); -typedef VkResult (VKAPI_PTR *PFN_vkCreateImageView)(VkDevice device, const 
VkImageViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkImageView* pView); -typedef void (VKAPI_PTR *PFN_vkDestroyImageView)(VkDevice device, VkImageView imageView, const VkAllocationCallbacks* pAllocator); -typedef VkResult (VKAPI_PTR *PFN_vkCreateShaderModule)(VkDevice device, const VkShaderModuleCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkShaderModule* pShaderModule); -typedef void (VKAPI_PTR *PFN_vkDestroyShaderModule)(VkDevice device, VkShaderModule shaderModule, const VkAllocationCallbacks* pAllocator); -typedef VkResult (VKAPI_PTR *PFN_vkCreatePipelineCache)(VkDevice device, const VkPipelineCacheCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPipelineCache* pPipelineCache); -typedef void (VKAPI_PTR *PFN_vkDestroyPipelineCache)(VkDevice device, VkPipelineCache pipelineCache, const VkAllocationCallbacks* pAllocator); -typedef VkResult (VKAPI_PTR *PFN_vkGetPipelineCacheData)(VkDevice device, VkPipelineCache pipelineCache, size_t* pDataSize, void* pData); -typedef VkResult (VKAPI_PTR *PFN_vkMergePipelineCaches)(VkDevice device, VkPipelineCache dstCache, uint32_t srcCacheCount, const VkPipelineCache* pSrcCaches); -typedef VkResult (VKAPI_PTR *PFN_vkCreateGraphicsPipelines)(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkGraphicsPipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines); -typedef VkResult (VKAPI_PTR *PFN_vkCreateComputePipelines)(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkComputePipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines); -typedef void (VKAPI_PTR *PFN_vkDestroyPipeline)(VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks* pAllocator); -typedef VkResult (VKAPI_PTR *PFN_vkCreatePipelineLayout)(VkDevice device, const VkPipelineLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPipelineLayout* pPipelineLayout); -typedef void (VKAPI_PTR *PFN_vkDestroyPipelineLayout)(VkDevice device, VkPipelineLayout pipelineLayout, const VkAllocationCallbacks* pAllocator); -typedef VkResult (VKAPI_PTR *PFN_vkCreateSampler)(VkDevice device, const VkSamplerCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSampler* pSampler); -typedef void (VKAPI_PTR *PFN_vkDestroySampler)(VkDevice device, VkSampler sampler, const VkAllocationCallbacks* pAllocator); -typedef VkResult (VKAPI_PTR *PFN_vkCreateDescriptorSetLayout)(VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorSetLayout* pSetLayout); -typedef void (VKAPI_PTR *PFN_vkDestroyDescriptorSetLayout)(VkDevice device, VkDescriptorSetLayout descriptorSetLayout, const VkAllocationCallbacks* pAllocator); -typedef VkResult (VKAPI_PTR *PFN_vkCreateDescriptorPool)(VkDevice device, const VkDescriptorPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorPool* pDescriptorPool); -typedef void (VKAPI_PTR *PFN_vkDestroyDescriptorPool)(VkDevice device, VkDescriptorPool descriptorPool, const VkAllocationCallbacks* pAllocator); -typedef VkResult (VKAPI_PTR *PFN_vkResetDescriptorPool)(VkDevice device, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags); -typedef VkResult (VKAPI_PTR *PFN_vkAllocateDescriptorSets)(VkDevice device, const VkDescriptorSetAllocateInfo* pAllocateInfo, VkDescriptorSet* pDescriptorSets); -typedef VkResult (VKAPI_PTR 
*PFN_vkFreeDescriptorSets)(VkDevice device, VkDescriptorPool descriptorPool, uint32_t descriptorSetCount, const VkDescriptorSet* pDescriptorSets); -typedef void (VKAPI_PTR *PFN_vkUpdateDescriptorSets)(VkDevice device, uint32_t descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites, uint32_t descriptorCopyCount, const VkCopyDescriptorSet* pDescriptorCopies); -typedef VkResult (VKAPI_PTR *PFN_vkCreateFramebuffer)(VkDevice device, const VkFramebufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkFramebuffer* pFramebuffer); -typedef void (VKAPI_PTR *PFN_vkDestroyFramebuffer)(VkDevice device, VkFramebuffer framebuffer, const VkAllocationCallbacks* pAllocator); -typedef VkResult (VKAPI_PTR *PFN_vkCreateRenderPass)(VkDevice device, const VkRenderPassCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass); -typedef void (VKAPI_PTR *PFN_vkDestroyRenderPass)(VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks* pAllocator); -typedef void (VKAPI_PTR *PFN_vkGetRenderAreaGranularity)(VkDevice device, VkRenderPass renderPass, VkExtent2D* pGranularity); -typedef VkResult (VKAPI_PTR *PFN_vkCreateCommandPool)(VkDevice device, const VkCommandPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkCommandPool* pCommandPool); -typedef void (VKAPI_PTR *PFN_vkDestroyCommandPool)(VkDevice device, VkCommandPool commandPool, const VkAllocationCallbacks* pAllocator); -typedef VkResult (VKAPI_PTR *PFN_vkResetCommandPool)(VkDevice device, VkCommandPool commandPool, VkCommandPoolResetFlags flags); -typedef VkResult (VKAPI_PTR *PFN_vkAllocateCommandBuffers)(VkDevice device, const VkCommandBufferAllocateInfo* pAllocateInfo, VkCommandBuffer* pCommandBuffers); -typedef void (VKAPI_PTR *PFN_vkFreeCommandBuffers)(VkDevice device, VkCommandPool commandPool, uint32_t commandBufferCount, const VkCommandBuffer* pCommandBuffers); -typedef VkResult (VKAPI_PTR *PFN_vkBeginCommandBuffer)(VkCommandBuffer commandBuffer, const VkCommandBufferBeginInfo* pBeginInfo); -typedef VkResult (VKAPI_PTR *PFN_vkEndCommandBuffer)(VkCommandBuffer commandBuffer); -typedef VkResult (VKAPI_PTR *PFN_vkResetCommandBuffer)(VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags); -typedef void (VKAPI_PTR *PFN_vkCmdBindPipeline)(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline); -typedef void (VKAPI_PTR *PFN_vkCmdSetViewport)(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewport* pViewports); -typedef void (VKAPI_PTR *PFN_vkCmdSetScissor)(VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount, const VkRect2D* pScissors); -typedef void (VKAPI_PTR *PFN_vkCmdSetLineWidth)(VkCommandBuffer commandBuffer, float lineWidth); -typedef void (VKAPI_PTR *PFN_vkCmdSetDepthBias)(VkCommandBuffer commandBuffer, float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor); -typedef void (VKAPI_PTR *PFN_vkCmdSetBlendConstants)(VkCommandBuffer commandBuffer, const float blendConstants[4]); -typedef void (VKAPI_PTR *PFN_vkCmdSetDepthBounds)(VkCommandBuffer commandBuffer, float minDepthBounds, float maxDepthBounds); -typedef void (VKAPI_PTR *PFN_vkCmdSetStencilCompareMask)(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t compareMask); -typedef void (VKAPI_PTR *PFN_vkCmdSetStencilWriteMask)(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t writeMask); -typedef void (VKAPI_PTR 
*PFN_vkCmdSetStencilReference)(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t reference); -typedef void (VKAPI_PTR *PFN_vkCmdBindDescriptorSets)(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const VkDescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets); -typedef void (VKAPI_PTR *PFN_vkCmdBindIndexBuffer)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkIndexType indexType); -typedef void (VKAPI_PTR *PFN_vkCmdBindVertexBuffers)(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets); -typedef void (VKAPI_PTR *PFN_vkCmdDraw)(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance); -typedef void (VKAPI_PTR *PFN_vkCmdDrawIndexed)(VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance); -typedef void (VKAPI_PTR *PFN_vkCmdDrawIndirect)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride); -typedef void (VKAPI_PTR *PFN_vkCmdDrawIndexedIndirect)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride); -typedef void (VKAPI_PTR *PFN_vkCmdDispatch)(VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ); -typedef void (VKAPI_PTR *PFN_vkCmdDispatchIndirect)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset); -typedef void (VKAPI_PTR *PFN_vkCmdCopyBuffer)(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferCopy* pRegions); -typedef void (VKAPI_PTR *PFN_vkCmdCopyImage)(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageCopy* pRegions); -typedef void (VKAPI_PTR *PFN_vkCmdBlitImage)(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageBlit* pRegions, VkFilter filter); -typedef void (VKAPI_PTR *PFN_vkCmdCopyBufferToImage)(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkBufferImageCopy* pRegions); -typedef void (VKAPI_PTR *PFN_vkCmdCopyImageToBuffer)(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferImageCopy* pRegions); -typedef void (VKAPI_PTR *PFN_vkCmdUpdateBuffer)(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize dataSize, const void* pData); -typedef void (VKAPI_PTR *PFN_vkCmdFillBuffer)(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize size, uint32_t data); -typedef void (VKAPI_PTR *PFN_vkCmdClearColorImage)(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearColorValue* pColor, uint32_t rangeCount, const VkImageSubresourceRange* pRanges); -typedef void (VKAPI_PTR *PFN_vkCmdClearDepthStencilImage)(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const VkImageSubresourceRange* pRanges); -typedef void 
(VKAPI_PTR *PFN_vkCmdClearAttachments)(VkCommandBuffer commandBuffer, uint32_t attachmentCount, const VkClearAttachment* pAttachments, uint32_t rectCount, const VkClearRect* pRects); -typedef void (VKAPI_PTR *PFN_vkCmdResolveImage)(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageResolve* pRegions); -typedef void (VKAPI_PTR *PFN_vkCmdSetEvent)(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask); -typedef void (VKAPI_PTR *PFN_vkCmdResetEvent)(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask); -typedef void (VKAPI_PTR *PFN_vkCmdWaitEvents)(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent* pEvents, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers); -typedef void (VKAPI_PTR *PFN_vkCmdPipelineBarrier)(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers); -typedef void (VKAPI_PTR *PFN_vkCmdBeginQuery)(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags); -typedef void (VKAPI_PTR *PFN_vkCmdEndQuery)(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query); -typedef void (VKAPI_PTR *PFN_vkCmdResetQueryPool)(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount); -typedef void (VKAPI_PTR *PFN_vkCmdWriteTimestamp)(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkQueryPool queryPool, uint32_t query); -typedef void (VKAPI_PTR *PFN_vkCmdCopyQueryPoolResults)(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize stride, VkQueryResultFlags flags); -typedef void (VKAPI_PTR *PFN_vkCmdPushConstants)(VkCommandBuffer commandBuffer, VkPipelineLayout layout, VkShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void* pValues); -typedef void (VKAPI_PTR *PFN_vkCmdBeginRenderPass)(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, VkSubpassContents contents); -typedef void (VKAPI_PTR *PFN_vkCmdNextSubpass)(VkCommandBuffer commandBuffer, VkSubpassContents contents); -typedef void (VKAPI_PTR *PFN_vkCmdEndRenderPass)(VkCommandBuffer commandBuffer); -typedef void (VKAPI_PTR *PFN_vkCmdExecuteCommands)(VkCommandBuffer commandBuffer, uint32_t commandBufferCount, const VkCommandBuffer* pCommandBuffers); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkCreateInstance( - const VkInstanceCreateInfo* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkInstance* pInstance); - -VKAPI_ATTR void VKAPI_CALL vkDestroyInstance( - VkInstance instance, - const VkAllocationCallbacks* pAllocator); - -VKAPI_ATTR VkResult VKAPI_CALL vkEnumeratePhysicalDevices( - VkInstance instance, - uint32_t* pPhysicalDeviceCount, - VkPhysicalDevice* pPhysicalDevices); - -VKAPI_ATTR void VKAPI_CALL 
vkGetPhysicalDeviceFeatures( - VkPhysicalDevice physicalDevice, - VkPhysicalDeviceFeatures* pFeatures); - -VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFormatProperties( - VkPhysicalDevice physicalDevice, - VkFormat format, - VkFormatProperties* pFormatProperties); - -VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceImageFormatProperties( - VkPhysicalDevice physicalDevice, - VkFormat format, - VkImageType type, - VkImageTiling tiling, - VkImageUsageFlags usage, - VkImageCreateFlags flags, - VkImageFormatProperties* pImageFormatProperties); - -VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceProperties( - VkPhysicalDevice physicalDevice, - VkPhysicalDeviceProperties* pProperties); - -VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceQueueFamilyProperties( - VkPhysicalDevice physicalDevice, - uint32_t* pQueueFamilyPropertyCount, - VkQueueFamilyProperties* pQueueFamilyProperties); - -VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceMemoryProperties( - VkPhysicalDevice physicalDevice, - VkPhysicalDeviceMemoryProperties* pMemoryProperties); - -VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetInstanceProcAddr( - VkInstance instance, - const char* pName); - -VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetDeviceProcAddr( - VkDevice device, - const char* pName); - -VKAPI_ATTR VkResult VKAPI_CALL vkCreateDevice( - VkPhysicalDevice physicalDevice, - const VkDeviceCreateInfo* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkDevice* pDevice); - -VKAPI_ATTR void VKAPI_CALL vkDestroyDevice( - VkDevice device, - const VkAllocationCallbacks* pAllocator); - -VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceExtensionProperties( - const char* pLayerName, - uint32_t* pPropertyCount, - VkExtensionProperties* pProperties); - -VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateDeviceExtensionProperties( - VkPhysicalDevice physicalDevice, - const char* pLayerName, - uint32_t* pPropertyCount, - VkExtensionProperties* pProperties); - -VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceLayerProperties( - uint32_t* pPropertyCount, - VkLayerProperties* pProperties); - -VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateDeviceLayerProperties( - VkPhysicalDevice physicalDevice, - uint32_t* pPropertyCount, - VkLayerProperties* pProperties); - -VKAPI_ATTR void VKAPI_CALL vkGetDeviceQueue( - VkDevice device, - uint32_t queueFamilyIndex, - uint32_t queueIndex, - VkQueue* pQueue); - -VKAPI_ATTR VkResult VKAPI_CALL vkQueueSubmit( - VkQueue queue, - uint32_t submitCount, - const VkSubmitInfo* pSubmits, - VkFence fence); - -VKAPI_ATTR VkResult VKAPI_CALL vkQueueWaitIdle( - VkQueue queue); - -VKAPI_ATTR VkResult VKAPI_CALL vkDeviceWaitIdle( - VkDevice device); - -VKAPI_ATTR VkResult VKAPI_CALL vkAllocateMemory( - VkDevice device, - const VkMemoryAllocateInfo* pAllocateInfo, - const VkAllocationCallbacks* pAllocator, - VkDeviceMemory* pMemory); - -VKAPI_ATTR void VKAPI_CALL vkFreeMemory( - VkDevice device, - VkDeviceMemory memory, - const VkAllocationCallbacks* pAllocator); - -VKAPI_ATTR VkResult VKAPI_CALL vkMapMemory( - VkDevice device, - VkDeviceMemory memory, - VkDeviceSize offset, - VkDeviceSize size, - VkMemoryMapFlags flags, - void** ppData); - -VKAPI_ATTR void VKAPI_CALL vkUnmapMemory( - VkDevice device, - VkDeviceMemory memory); - -VKAPI_ATTR VkResult VKAPI_CALL vkFlushMappedMemoryRanges( - VkDevice device, - uint32_t memoryRangeCount, - const VkMappedMemoryRange* pMemoryRanges); - -VKAPI_ATTR VkResult VKAPI_CALL vkInvalidateMappedMemoryRanges( - VkDevice device, - uint32_t memoryRangeCount, - const VkMappedMemoryRange* pMemoryRanges); - 
-VKAPI_ATTR void VKAPI_CALL vkGetDeviceMemoryCommitment( - VkDevice device, - VkDeviceMemory memory, - VkDeviceSize* pCommittedMemoryInBytes); - -VKAPI_ATTR VkResult VKAPI_CALL vkBindBufferMemory( - VkDevice device, - VkBuffer buffer, - VkDeviceMemory memory, - VkDeviceSize memoryOffset); - -VKAPI_ATTR VkResult VKAPI_CALL vkBindImageMemory( - VkDevice device, - VkImage image, - VkDeviceMemory memory, - VkDeviceSize memoryOffset); - -VKAPI_ATTR void VKAPI_CALL vkGetBufferMemoryRequirements( - VkDevice device, - VkBuffer buffer, - VkMemoryRequirements* pMemoryRequirements); - -VKAPI_ATTR void VKAPI_CALL vkGetImageMemoryRequirements( - VkDevice device, - VkImage image, - VkMemoryRequirements* pMemoryRequirements); - -VKAPI_ATTR void VKAPI_CALL vkGetImageSparseMemoryRequirements( - VkDevice device, - VkImage image, - uint32_t* pSparseMemoryRequirementCount, - VkSparseImageMemoryRequirements* pSparseMemoryRequirements); - -VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceSparseImageFormatProperties( - VkPhysicalDevice physicalDevice, - VkFormat format, - VkImageType type, - VkSampleCountFlagBits samples, - VkImageUsageFlags usage, - VkImageTiling tiling, - uint32_t* pPropertyCount, - VkSparseImageFormatProperties* pProperties); - -VKAPI_ATTR VkResult VKAPI_CALL vkQueueBindSparse( - VkQueue queue, - uint32_t bindInfoCount, - const VkBindSparseInfo* pBindInfo, - VkFence fence); - -VKAPI_ATTR VkResult VKAPI_CALL vkCreateFence( - VkDevice device, - const VkFenceCreateInfo* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkFence* pFence); - -VKAPI_ATTR void VKAPI_CALL vkDestroyFence( - VkDevice device, - VkFence fence, - const VkAllocationCallbacks* pAllocator); - -VKAPI_ATTR VkResult VKAPI_CALL vkResetFences( - VkDevice device, - uint32_t fenceCount, - const VkFence* pFences); - -VKAPI_ATTR VkResult VKAPI_CALL vkGetFenceStatus( - VkDevice device, - VkFence fence); - -VKAPI_ATTR VkResult VKAPI_CALL vkWaitForFences( - VkDevice device, - uint32_t fenceCount, - const VkFence* pFences, - VkBool32 waitAll, - uint64_t timeout); - -VKAPI_ATTR VkResult VKAPI_CALL vkCreateSemaphore( - VkDevice device, - const VkSemaphoreCreateInfo* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkSemaphore* pSemaphore); - -VKAPI_ATTR void VKAPI_CALL vkDestroySemaphore( - VkDevice device, - VkSemaphore semaphore, - const VkAllocationCallbacks* pAllocator); - -VKAPI_ATTR VkResult VKAPI_CALL vkCreateEvent( - VkDevice device, - const VkEventCreateInfo* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkEvent* pEvent); - -VKAPI_ATTR void VKAPI_CALL vkDestroyEvent( - VkDevice device, - VkEvent event, - const VkAllocationCallbacks* pAllocator); - -VKAPI_ATTR VkResult VKAPI_CALL vkGetEventStatus( - VkDevice device, - VkEvent event); - -VKAPI_ATTR VkResult VKAPI_CALL vkSetEvent( - VkDevice device, - VkEvent event); - -VKAPI_ATTR VkResult VKAPI_CALL vkResetEvent( - VkDevice device, - VkEvent event); - -VKAPI_ATTR VkResult VKAPI_CALL vkCreateQueryPool( - VkDevice device, - const VkQueryPoolCreateInfo* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkQueryPool* pQueryPool); - -VKAPI_ATTR void VKAPI_CALL vkDestroyQueryPool( - VkDevice device, - VkQueryPool queryPool, - const VkAllocationCallbacks* pAllocator); - -VKAPI_ATTR VkResult VKAPI_CALL vkGetQueryPoolResults( - VkDevice device, - VkQueryPool queryPool, - uint32_t firstQuery, - uint32_t queryCount, - size_t dataSize, - void* pData, - VkDeviceSize stride, - VkQueryResultFlags flags); - -VKAPI_ATTR VkResult VKAPI_CALL vkCreateBuffer( - VkDevice 
device, - const VkBufferCreateInfo* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkBuffer* pBuffer); - -VKAPI_ATTR void VKAPI_CALL vkDestroyBuffer( - VkDevice device, - VkBuffer buffer, - const VkAllocationCallbacks* pAllocator); - -VKAPI_ATTR VkResult VKAPI_CALL vkCreateBufferView( - VkDevice device, - const VkBufferViewCreateInfo* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkBufferView* pView); - -VKAPI_ATTR void VKAPI_CALL vkDestroyBufferView( - VkDevice device, - VkBufferView bufferView, - const VkAllocationCallbacks* pAllocator); - -VKAPI_ATTR VkResult VKAPI_CALL vkCreateImage( - VkDevice device, - const VkImageCreateInfo* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkImage* pImage); - -VKAPI_ATTR void VKAPI_CALL vkDestroyImage( - VkDevice device, - VkImage image, - const VkAllocationCallbacks* pAllocator); - -VKAPI_ATTR void VKAPI_CALL vkGetImageSubresourceLayout( - VkDevice device, - VkImage image, - const VkImageSubresource* pSubresource, - VkSubresourceLayout* pLayout); - -VKAPI_ATTR VkResult VKAPI_CALL vkCreateImageView( - VkDevice device, - const VkImageViewCreateInfo* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkImageView* pView); - -VKAPI_ATTR void VKAPI_CALL vkDestroyImageView( - VkDevice device, - VkImageView imageView, - const VkAllocationCallbacks* pAllocator); - -VKAPI_ATTR VkResult VKAPI_CALL vkCreateShaderModule( - VkDevice device, - const VkShaderModuleCreateInfo* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkShaderModule* pShaderModule); - -VKAPI_ATTR void VKAPI_CALL vkDestroyShaderModule( - VkDevice device, - VkShaderModule shaderModule, - const VkAllocationCallbacks* pAllocator); - -VKAPI_ATTR VkResult VKAPI_CALL vkCreatePipelineCache( - VkDevice device, - const VkPipelineCacheCreateInfo* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkPipelineCache* pPipelineCache); - -VKAPI_ATTR void VKAPI_CALL vkDestroyPipelineCache( - VkDevice device, - VkPipelineCache pipelineCache, - const VkAllocationCallbacks* pAllocator); - -VKAPI_ATTR VkResult VKAPI_CALL vkGetPipelineCacheData( - VkDevice device, - VkPipelineCache pipelineCache, - size_t* pDataSize, - void* pData); - -VKAPI_ATTR VkResult VKAPI_CALL vkMergePipelineCaches( - VkDevice device, - VkPipelineCache dstCache, - uint32_t srcCacheCount, - const VkPipelineCache* pSrcCaches); - -VKAPI_ATTR VkResult VKAPI_CALL vkCreateGraphicsPipelines( - VkDevice device, - VkPipelineCache pipelineCache, - uint32_t createInfoCount, - const VkGraphicsPipelineCreateInfo* pCreateInfos, - const VkAllocationCallbacks* pAllocator, - VkPipeline* pPipelines); - -VKAPI_ATTR VkResult VKAPI_CALL vkCreateComputePipelines( - VkDevice device, - VkPipelineCache pipelineCache, - uint32_t createInfoCount, - const VkComputePipelineCreateInfo* pCreateInfos, - const VkAllocationCallbacks* pAllocator, - VkPipeline* pPipelines); - -VKAPI_ATTR void VKAPI_CALL vkDestroyPipeline( - VkDevice device, - VkPipeline pipeline, - const VkAllocationCallbacks* pAllocator); - -VKAPI_ATTR VkResult VKAPI_CALL vkCreatePipelineLayout( - VkDevice device, - const VkPipelineLayoutCreateInfo* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkPipelineLayout* pPipelineLayout); - -VKAPI_ATTR void VKAPI_CALL vkDestroyPipelineLayout( - VkDevice device, - VkPipelineLayout pipelineLayout, - const VkAllocationCallbacks* pAllocator); - -VKAPI_ATTR VkResult VKAPI_CALL vkCreateSampler( - VkDevice device, - const VkSamplerCreateInfo* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - 
VkSampler* pSampler); - -VKAPI_ATTR void VKAPI_CALL vkDestroySampler( - VkDevice device, - VkSampler sampler, - const VkAllocationCallbacks* pAllocator); - -VKAPI_ATTR VkResult VKAPI_CALL vkCreateDescriptorSetLayout( - VkDevice device, - const VkDescriptorSetLayoutCreateInfo* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkDescriptorSetLayout* pSetLayout); - -VKAPI_ATTR void VKAPI_CALL vkDestroyDescriptorSetLayout( - VkDevice device, - VkDescriptorSetLayout descriptorSetLayout, - const VkAllocationCallbacks* pAllocator); - -VKAPI_ATTR VkResult VKAPI_CALL vkCreateDescriptorPool( - VkDevice device, - const VkDescriptorPoolCreateInfo* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkDescriptorPool* pDescriptorPool); - -VKAPI_ATTR void VKAPI_CALL vkDestroyDescriptorPool( - VkDevice device, - VkDescriptorPool descriptorPool, - const VkAllocationCallbacks* pAllocator); - -VKAPI_ATTR VkResult VKAPI_CALL vkResetDescriptorPool( - VkDevice device, - VkDescriptorPool descriptorPool, - VkDescriptorPoolResetFlags flags); - -VKAPI_ATTR VkResult VKAPI_CALL vkAllocateDescriptorSets( - VkDevice device, - const VkDescriptorSetAllocateInfo* pAllocateInfo, - VkDescriptorSet* pDescriptorSets); - -VKAPI_ATTR VkResult VKAPI_CALL vkFreeDescriptorSets( - VkDevice device, - VkDescriptorPool descriptorPool, - uint32_t descriptorSetCount, - const VkDescriptorSet* pDescriptorSets); - -VKAPI_ATTR void VKAPI_CALL vkUpdateDescriptorSets( - VkDevice device, - uint32_t descriptorWriteCount, - const VkWriteDescriptorSet* pDescriptorWrites, - uint32_t descriptorCopyCount, - const VkCopyDescriptorSet* pDescriptorCopies); - -VKAPI_ATTR VkResult VKAPI_CALL vkCreateFramebuffer( - VkDevice device, - const VkFramebufferCreateInfo* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkFramebuffer* pFramebuffer); - -VKAPI_ATTR void VKAPI_CALL vkDestroyFramebuffer( - VkDevice device, - VkFramebuffer framebuffer, - const VkAllocationCallbacks* pAllocator); - -VKAPI_ATTR VkResult VKAPI_CALL vkCreateRenderPass( - VkDevice device, - const VkRenderPassCreateInfo* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkRenderPass* pRenderPass); - -VKAPI_ATTR void VKAPI_CALL vkDestroyRenderPass( - VkDevice device, - VkRenderPass renderPass, - const VkAllocationCallbacks* pAllocator); - -VKAPI_ATTR void VKAPI_CALL vkGetRenderAreaGranularity( - VkDevice device, - VkRenderPass renderPass, - VkExtent2D* pGranularity); - -VKAPI_ATTR VkResult VKAPI_CALL vkCreateCommandPool( - VkDevice device, - const VkCommandPoolCreateInfo* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkCommandPool* pCommandPool); - -VKAPI_ATTR void VKAPI_CALL vkDestroyCommandPool( - VkDevice device, - VkCommandPool commandPool, - const VkAllocationCallbacks* pAllocator); - -VKAPI_ATTR VkResult VKAPI_CALL vkResetCommandPool( - VkDevice device, - VkCommandPool commandPool, - VkCommandPoolResetFlags flags); - -VKAPI_ATTR VkResult VKAPI_CALL vkAllocateCommandBuffers( - VkDevice device, - const VkCommandBufferAllocateInfo* pAllocateInfo, - VkCommandBuffer* pCommandBuffers); - -VKAPI_ATTR void VKAPI_CALL vkFreeCommandBuffers( - VkDevice device, - VkCommandPool commandPool, - uint32_t commandBufferCount, - const VkCommandBuffer* pCommandBuffers); - -VKAPI_ATTR VkResult VKAPI_CALL vkBeginCommandBuffer( - VkCommandBuffer commandBuffer, - const VkCommandBufferBeginInfo* pBeginInfo); - -VKAPI_ATTR VkResult VKAPI_CALL vkEndCommandBuffer( - VkCommandBuffer commandBuffer); - -VKAPI_ATTR VkResult VKAPI_CALL vkResetCommandBuffer( - VkCommandBuffer 
commandBuffer, - VkCommandBufferResetFlags flags); - -VKAPI_ATTR void VKAPI_CALL vkCmdBindPipeline( - VkCommandBuffer commandBuffer, - VkPipelineBindPoint pipelineBindPoint, - VkPipeline pipeline); - -VKAPI_ATTR void VKAPI_CALL vkCmdSetViewport( - VkCommandBuffer commandBuffer, - uint32_t firstViewport, - uint32_t viewportCount, - const VkViewport* pViewports); - -VKAPI_ATTR void VKAPI_CALL vkCmdSetScissor( - VkCommandBuffer commandBuffer, - uint32_t firstScissor, - uint32_t scissorCount, - const VkRect2D* pScissors); - -VKAPI_ATTR void VKAPI_CALL vkCmdSetLineWidth( - VkCommandBuffer commandBuffer, - float lineWidth); - -VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthBias( - VkCommandBuffer commandBuffer, - float depthBiasConstantFactor, - float depthBiasClamp, - float depthBiasSlopeFactor); - -VKAPI_ATTR void VKAPI_CALL vkCmdSetBlendConstants( - VkCommandBuffer commandBuffer, - const float blendConstants[4]); - -VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthBounds( - VkCommandBuffer commandBuffer, - float minDepthBounds, - float maxDepthBounds); - -VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilCompareMask( - VkCommandBuffer commandBuffer, - VkStencilFaceFlags faceMask, - uint32_t compareMask); - -VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilWriteMask( - VkCommandBuffer commandBuffer, - VkStencilFaceFlags faceMask, - uint32_t writeMask); - -VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilReference( - VkCommandBuffer commandBuffer, - VkStencilFaceFlags faceMask, - uint32_t reference); - -VKAPI_ATTR void VKAPI_CALL vkCmdBindDescriptorSets( - VkCommandBuffer commandBuffer, - VkPipelineBindPoint pipelineBindPoint, - VkPipelineLayout layout, - uint32_t firstSet, - uint32_t descriptorSetCount, - const VkDescriptorSet* pDescriptorSets, - uint32_t dynamicOffsetCount, - const uint32_t* pDynamicOffsets); - -VKAPI_ATTR void VKAPI_CALL vkCmdBindIndexBuffer( - VkCommandBuffer commandBuffer, - VkBuffer buffer, - VkDeviceSize offset, - VkIndexType indexType); - -VKAPI_ATTR void VKAPI_CALL vkCmdBindVertexBuffers( - VkCommandBuffer commandBuffer, - uint32_t firstBinding, - uint32_t bindingCount, - const VkBuffer* pBuffers, - const VkDeviceSize* pOffsets); - -VKAPI_ATTR void VKAPI_CALL vkCmdDraw( - VkCommandBuffer commandBuffer, - uint32_t vertexCount, - uint32_t instanceCount, - uint32_t firstVertex, - uint32_t firstInstance); - -VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexed( - VkCommandBuffer commandBuffer, - uint32_t indexCount, - uint32_t instanceCount, - uint32_t firstIndex, - int32_t vertexOffset, - uint32_t firstInstance); - -VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndirect( - VkCommandBuffer commandBuffer, - VkBuffer buffer, - VkDeviceSize offset, - uint32_t drawCount, - uint32_t stride); - -VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexedIndirect( - VkCommandBuffer commandBuffer, - VkBuffer buffer, - VkDeviceSize offset, - uint32_t drawCount, - uint32_t stride); - -VKAPI_ATTR void VKAPI_CALL vkCmdDispatch( - VkCommandBuffer commandBuffer, - uint32_t groupCountX, - uint32_t groupCountY, - uint32_t groupCountZ); - -VKAPI_ATTR void VKAPI_CALL vkCmdDispatchIndirect( - VkCommandBuffer commandBuffer, - VkBuffer buffer, - VkDeviceSize offset); - -VKAPI_ATTR void VKAPI_CALL vkCmdCopyBuffer( - VkCommandBuffer commandBuffer, - VkBuffer srcBuffer, - VkBuffer dstBuffer, - uint32_t regionCount, - const VkBufferCopy* pRegions); - -VKAPI_ATTR void VKAPI_CALL vkCmdCopyImage( - VkCommandBuffer commandBuffer, - VkImage srcImage, - VkImageLayout srcImageLayout, - VkImage dstImage, - VkImageLayout dstImageLayout, - uint32_t regionCount, - const 
VkImageCopy* pRegions); - -VKAPI_ATTR void VKAPI_CALL vkCmdBlitImage( - VkCommandBuffer commandBuffer, - VkImage srcImage, - VkImageLayout srcImageLayout, - VkImage dstImage, - VkImageLayout dstImageLayout, - uint32_t regionCount, - const VkImageBlit* pRegions, - VkFilter filter); - -VKAPI_ATTR void VKAPI_CALL vkCmdCopyBufferToImage( - VkCommandBuffer commandBuffer, - VkBuffer srcBuffer, - VkImage dstImage, - VkImageLayout dstImageLayout, - uint32_t regionCount, - const VkBufferImageCopy* pRegions); - -VKAPI_ATTR void VKAPI_CALL vkCmdCopyImageToBuffer( - VkCommandBuffer commandBuffer, - VkImage srcImage, - VkImageLayout srcImageLayout, - VkBuffer dstBuffer, - uint32_t regionCount, - const VkBufferImageCopy* pRegions); - -VKAPI_ATTR void VKAPI_CALL vkCmdUpdateBuffer( - VkCommandBuffer commandBuffer, - VkBuffer dstBuffer, - VkDeviceSize dstOffset, - VkDeviceSize dataSize, - const void* pData); - -VKAPI_ATTR void VKAPI_CALL vkCmdFillBuffer( - VkCommandBuffer commandBuffer, - VkBuffer dstBuffer, - VkDeviceSize dstOffset, - VkDeviceSize size, - uint32_t data); - -VKAPI_ATTR void VKAPI_CALL vkCmdClearColorImage( - VkCommandBuffer commandBuffer, - VkImage image, - VkImageLayout imageLayout, - const VkClearColorValue* pColor, - uint32_t rangeCount, - const VkImageSubresourceRange* pRanges); - -VKAPI_ATTR void VKAPI_CALL vkCmdClearDepthStencilImage( - VkCommandBuffer commandBuffer, - VkImage image, - VkImageLayout imageLayout, - const VkClearDepthStencilValue* pDepthStencil, - uint32_t rangeCount, - const VkImageSubresourceRange* pRanges); - -VKAPI_ATTR void VKAPI_CALL vkCmdClearAttachments( - VkCommandBuffer commandBuffer, - uint32_t attachmentCount, - const VkClearAttachment* pAttachments, - uint32_t rectCount, - const VkClearRect* pRects); - -VKAPI_ATTR void VKAPI_CALL vkCmdResolveImage( - VkCommandBuffer commandBuffer, - VkImage srcImage, - VkImageLayout srcImageLayout, - VkImage dstImage, - VkImageLayout dstImageLayout, - uint32_t regionCount, - const VkImageResolve* pRegions); - -VKAPI_ATTR void VKAPI_CALL vkCmdSetEvent( - VkCommandBuffer commandBuffer, - VkEvent event, - VkPipelineStageFlags stageMask); - -VKAPI_ATTR void VKAPI_CALL vkCmdResetEvent( - VkCommandBuffer commandBuffer, - VkEvent event, - VkPipelineStageFlags stageMask); - -VKAPI_ATTR void VKAPI_CALL vkCmdWaitEvents( - VkCommandBuffer commandBuffer, - uint32_t eventCount, - const VkEvent* pEvents, - VkPipelineStageFlags srcStageMask, - VkPipelineStageFlags dstStageMask, - uint32_t memoryBarrierCount, - const VkMemoryBarrier* pMemoryBarriers, - uint32_t bufferMemoryBarrierCount, - const VkBufferMemoryBarrier* pBufferMemoryBarriers, - uint32_t imageMemoryBarrierCount, - const VkImageMemoryBarrier* pImageMemoryBarriers); - -VKAPI_ATTR void VKAPI_CALL vkCmdPipelineBarrier( - VkCommandBuffer commandBuffer, - VkPipelineStageFlags srcStageMask, - VkPipelineStageFlags dstStageMask, - VkDependencyFlags dependencyFlags, - uint32_t memoryBarrierCount, - const VkMemoryBarrier* pMemoryBarriers, - uint32_t bufferMemoryBarrierCount, - const VkBufferMemoryBarrier* pBufferMemoryBarriers, - uint32_t imageMemoryBarrierCount, - const VkImageMemoryBarrier* pImageMemoryBarriers); - -VKAPI_ATTR void VKAPI_CALL vkCmdBeginQuery( - VkCommandBuffer commandBuffer, - VkQueryPool queryPool, - uint32_t query, - VkQueryControlFlags flags); - -VKAPI_ATTR void VKAPI_CALL vkCmdEndQuery( - VkCommandBuffer commandBuffer, - VkQueryPool queryPool, - uint32_t query); - -VKAPI_ATTR void VKAPI_CALL vkCmdResetQueryPool( - VkCommandBuffer commandBuffer, - VkQueryPool 
queryPool, - uint32_t firstQuery, - uint32_t queryCount); - -VKAPI_ATTR void VKAPI_CALL vkCmdWriteTimestamp( - VkCommandBuffer commandBuffer, - VkPipelineStageFlagBits pipelineStage, - VkQueryPool queryPool, - uint32_t query); - -VKAPI_ATTR void VKAPI_CALL vkCmdCopyQueryPoolResults( - VkCommandBuffer commandBuffer, - VkQueryPool queryPool, - uint32_t firstQuery, - uint32_t queryCount, - VkBuffer dstBuffer, - VkDeviceSize dstOffset, - VkDeviceSize stride, - VkQueryResultFlags flags); - -VKAPI_ATTR void VKAPI_CALL vkCmdPushConstants( - VkCommandBuffer commandBuffer, - VkPipelineLayout layout, - VkShaderStageFlags stageFlags, - uint32_t offset, - uint32_t size, - const void* pValues); - -VKAPI_ATTR void VKAPI_CALL vkCmdBeginRenderPass( - VkCommandBuffer commandBuffer, - const VkRenderPassBeginInfo* pRenderPassBegin, - VkSubpassContents contents); - -VKAPI_ATTR void VKAPI_CALL vkCmdNextSubpass( - VkCommandBuffer commandBuffer, - VkSubpassContents contents); - -VKAPI_ATTR void VKAPI_CALL vkCmdEndRenderPass( - VkCommandBuffer commandBuffer); - -VKAPI_ATTR void VKAPI_CALL vkCmdExecuteCommands( - VkCommandBuffer commandBuffer, - uint32_t commandBufferCount, - const VkCommandBuffer* pCommandBuffers); -#endif - -#define VK_KHR_surface 1 -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSurfaceKHR) - -#define VK_KHR_SURFACE_SPEC_VERSION 25 -#define VK_KHR_SURFACE_EXTENSION_NAME "VK_KHR_surface" -#define VK_COLORSPACE_SRGB_NONLINEAR_KHR VK_COLOR_SPACE_SRGB_NONLINEAR_KHR - - -typedef enum VkColorSpaceKHR { - VK_COLOR_SPACE_SRGB_NONLINEAR_KHR = 0, - VK_COLOR_SPACE_DISPLAY_P3_NONLINEAR_EXT = 1000104001, - VK_COLOR_SPACE_EXTENDED_SRGB_LINEAR_EXT = 1000104002, - VK_COLOR_SPACE_DCI_P3_LINEAR_EXT = 1000104003, - VK_COLOR_SPACE_DCI_P3_NONLINEAR_EXT = 1000104004, - VK_COLOR_SPACE_BT709_LINEAR_EXT = 1000104005, - VK_COLOR_SPACE_BT709_NONLINEAR_EXT = 1000104006, - VK_COLOR_SPACE_BT2020_LINEAR_EXT = 1000104007, - VK_COLOR_SPACE_HDR10_ST2084_EXT = 1000104008, - VK_COLOR_SPACE_DOLBYVISION_EXT = 1000104009, - VK_COLOR_SPACE_HDR10_HLG_EXT = 1000104010, - VK_COLOR_SPACE_ADOBERGB_LINEAR_EXT = 1000104011, - VK_COLOR_SPACE_ADOBERGB_NONLINEAR_EXT = 1000104012, - VK_COLOR_SPACE_PASS_THROUGH_EXT = 1000104013, - VK_COLOR_SPACE_EXTENDED_SRGB_NONLINEAR_EXT = 1000104014, - VK_COLOR_SPACE_BEGIN_RANGE_KHR = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR, - VK_COLOR_SPACE_END_RANGE_KHR = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR, - VK_COLOR_SPACE_RANGE_SIZE_KHR = (VK_COLOR_SPACE_SRGB_NONLINEAR_KHR - VK_COLOR_SPACE_SRGB_NONLINEAR_KHR + 1), - VK_COLOR_SPACE_MAX_ENUM_KHR = 0x7FFFFFFF -} VkColorSpaceKHR; - -typedef enum VkPresentModeKHR { - VK_PRESENT_MODE_IMMEDIATE_KHR = 0, - VK_PRESENT_MODE_MAILBOX_KHR = 1, - VK_PRESENT_MODE_FIFO_KHR = 2, - VK_PRESENT_MODE_FIFO_RELAXED_KHR = 3, - VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR = 1000111000, - VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR = 1000111001, - VK_PRESENT_MODE_BEGIN_RANGE_KHR = VK_PRESENT_MODE_IMMEDIATE_KHR, - VK_PRESENT_MODE_END_RANGE_KHR = VK_PRESENT_MODE_FIFO_RELAXED_KHR, - VK_PRESENT_MODE_RANGE_SIZE_KHR = (VK_PRESENT_MODE_FIFO_RELAXED_KHR - VK_PRESENT_MODE_IMMEDIATE_KHR + 1), - VK_PRESENT_MODE_MAX_ENUM_KHR = 0x7FFFFFFF -} VkPresentModeKHR; - - -typedef enum VkSurfaceTransformFlagBitsKHR { - VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR = 0x00000001, - VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR = 0x00000002, - VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR = 0x00000004, - VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR = 0x00000008, - VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_BIT_KHR = 0x00000010, - 
VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR = 0x00000020, - VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR = 0x00000040, - VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR = 0x00000080, - VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR = 0x00000100, - VK_SURFACE_TRANSFORM_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF -} VkSurfaceTransformFlagBitsKHR; -typedef VkFlags VkSurfaceTransformFlagsKHR; - -typedef enum VkCompositeAlphaFlagBitsKHR { - VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR = 0x00000001, - VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR = 0x00000002, - VK_COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR = 0x00000004, - VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR = 0x00000008, - VK_COMPOSITE_ALPHA_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF -} VkCompositeAlphaFlagBitsKHR; -typedef VkFlags VkCompositeAlphaFlagsKHR; - -typedef struct VkSurfaceCapabilitiesKHR { - uint32_t minImageCount; - uint32_t maxImageCount; - VkExtent2D currentExtent; - VkExtent2D minImageExtent; - VkExtent2D maxImageExtent; - uint32_t maxImageArrayLayers; - VkSurfaceTransformFlagsKHR supportedTransforms; - VkSurfaceTransformFlagBitsKHR currentTransform; - VkCompositeAlphaFlagsKHR supportedCompositeAlpha; - VkImageUsageFlags supportedUsageFlags; -} VkSurfaceCapabilitiesKHR; - -typedef struct VkSurfaceFormatKHR { - VkFormat format; - VkColorSpaceKHR colorSpace; -} VkSurfaceFormatKHR; - - -typedef void (VKAPI_PTR *PFN_vkDestroySurfaceKHR)(VkInstance instance, VkSurfaceKHR surface, const VkAllocationCallbacks* pAllocator); -typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceSupportKHR)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, VkSurfaceKHR surface, VkBool32* pSupported); -typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR)(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilitiesKHR* pSurfaceCapabilities); -typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceFormatsKHR)(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pSurfaceFormatCount, VkSurfaceFormatKHR* pSurfaceFormats); -typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfacePresentModesKHR)(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pPresentModeCount, VkPresentModeKHR* pPresentModes); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkDestroySurfaceKHR( - VkInstance instance, - VkSurfaceKHR surface, - const VkAllocationCallbacks* pAllocator); - -VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceSupportKHR( - VkPhysicalDevice physicalDevice, - uint32_t queueFamilyIndex, - VkSurfaceKHR surface, - VkBool32* pSupported); - -VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceCapabilitiesKHR( - VkPhysicalDevice physicalDevice, - VkSurfaceKHR surface, - VkSurfaceCapabilitiesKHR* pSurfaceCapabilities); - -VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceFormatsKHR( - VkPhysicalDevice physicalDevice, - VkSurfaceKHR surface, - uint32_t* pSurfaceFormatCount, - VkSurfaceFormatKHR* pSurfaceFormats); - -VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfacePresentModesKHR( - VkPhysicalDevice physicalDevice, - VkSurfaceKHR surface, - uint32_t* pPresentModeCount, - VkPresentModeKHR* pPresentModes); -#endif - -#define VK_KHR_swapchain 1 -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSwapchainKHR) - -#define VK_KHR_SWAPCHAIN_SPEC_VERSION 68 -#define VK_KHR_SWAPCHAIN_EXTENSION_NAME "VK_KHR_swapchain" - - -typedef enum VkSwapchainCreateFlagBitsKHR { - VK_SWAPCHAIN_CREATE_BIND_SFR_BIT_KHX = 0x00000001, - VK_SWAPCHAIN_CREATE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF -} 
VkSwapchainCreateFlagBitsKHR; -typedef VkFlags VkSwapchainCreateFlagsKHR; - -typedef struct VkSwapchainCreateInfoKHR { - VkStructureType sType; - const void* pNext; - VkSwapchainCreateFlagsKHR flags; - VkSurfaceKHR surface; - uint32_t minImageCount; - VkFormat imageFormat; - VkColorSpaceKHR imageColorSpace; - VkExtent2D imageExtent; - uint32_t imageArrayLayers; - VkImageUsageFlags imageUsage; - VkSharingMode imageSharingMode; - uint32_t queueFamilyIndexCount; - const uint32_t* pQueueFamilyIndices; - VkSurfaceTransformFlagBitsKHR preTransform; - VkCompositeAlphaFlagBitsKHR compositeAlpha; - VkPresentModeKHR presentMode; - VkBool32 clipped; - VkSwapchainKHR oldSwapchain; -} VkSwapchainCreateInfoKHR; - -typedef struct VkPresentInfoKHR { - VkStructureType sType; - const void* pNext; - uint32_t waitSemaphoreCount; - const VkSemaphore* pWaitSemaphores; - uint32_t swapchainCount; - const VkSwapchainKHR* pSwapchains; - const uint32_t* pImageIndices; - VkResult* pResults; -} VkPresentInfoKHR; - - -typedef VkResult (VKAPI_PTR *PFN_vkCreateSwapchainKHR)(VkDevice device, const VkSwapchainCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchain); -typedef void (VKAPI_PTR *PFN_vkDestroySwapchainKHR)(VkDevice device, VkSwapchainKHR swapchain, const VkAllocationCallbacks* pAllocator); -typedef VkResult (VKAPI_PTR *PFN_vkGetSwapchainImagesKHR)(VkDevice device, VkSwapchainKHR swapchain, uint32_t* pSwapchainImageCount, VkImage* pSwapchainImages); -typedef VkResult (VKAPI_PTR *PFN_vkAcquireNextImageKHR)(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout, VkSemaphore semaphore, VkFence fence, uint32_t* pImageIndex); -typedef VkResult (VKAPI_PTR *PFN_vkQueuePresentKHR)(VkQueue queue, const VkPresentInfoKHR* pPresentInfo); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkCreateSwapchainKHR( - VkDevice device, - const VkSwapchainCreateInfoKHR* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkSwapchainKHR* pSwapchain); - -VKAPI_ATTR void VKAPI_CALL vkDestroySwapchainKHR( - VkDevice device, - VkSwapchainKHR swapchain, - const VkAllocationCallbacks* pAllocator); - -VKAPI_ATTR VkResult VKAPI_CALL vkGetSwapchainImagesKHR( - VkDevice device, - VkSwapchainKHR swapchain, - uint32_t* pSwapchainImageCount, - VkImage* pSwapchainImages); - -VKAPI_ATTR VkResult VKAPI_CALL vkAcquireNextImageKHR( - VkDevice device, - VkSwapchainKHR swapchain, - uint64_t timeout, - VkSemaphore semaphore, - VkFence fence, - uint32_t* pImageIndex); - -VKAPI_ATTR VkResult VKAPI_CALL vkQueuePresentKHR( - VkQueue queue, - const VkPresentInfoKHR* pPresentInfo); -#endif - -#define VK_KHR_display 1 -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDisplayKHR) -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDisplayModeKHR) - -#define VK_KHR_DISPLAY_SPEC_VERSION 21 -#define VK_KHR_DISPLAY_EXTENSION_NAME "VK_KHR_display" - - -typedef enum VkDisplayPlaneAlphaFlagBitsKHR { - VK_DISPLAY_PLANE_ALPHA_OPAQUE_BIT_KHR = 0x00000001, - VK_DISPLAY_PLANE_ALPHA_GLOBAL_BIT_KHR = 0x00000002, - VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_BIT_KHR = 0x00000004, - VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_PREMULTIPLIED_BIT_KHR = 0x00000008, - VK_DISPLAY_PLANE_ALPHA_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF -} VkDisplayPlaneAlphaFlagBitsKHR; -typedef VkFlags VkDisplayPlaneAlphaFlagsKHR; -typedef VkFlags VkDisplayModeCreateFlagsKHR; -typedef VkFlags VkDisplaySurfaceCreateFlagsKHR; - -typedef struct VkDisplayPropertiesKHR { - VkDisplayKHR display; - const char* displayName; - VkExtent2D physicalDimensions; - VkExtent2D physicalResolution; - 
VkSurfaceTransformFlagsKHR supportedTransforms; - VkBool32 planeReorderPossible; - VkBool32 persistentContent; -} VkDisplayPropertiesKHR; - -typedef struct VkDisplayModeParametersKHR { - VkExtent2D visibleRegion; - uint32_t refreshRate; -} VkDisplayModeParametersKHR; - -typedef struct VkDisplayModePropertiesKHR { - VkDisplayModeKHR displayMode; - VkDisplayModeParametersKHR parameters; -} VkDisplayModePropertiesKHR; - -typedef struct VkDisplayModeCreateInfoKHR { - VkStructureType sType; - const void* pNext; - VkDisplayModeCreateFlagsKHR flags; - VkDisplayModeParametersKHR parameters; -} VkDisplayModeCreateInfoKHR; - -typedef struct VkDisplayPlaneCapabilitiesKHR { - VkDisplayPlaneAlphaFlagsKHR supportedAlpha; - VkOffset2D minSrcPosition; - VkOffset2D maxSrcPosition; - VkExtent2D minSrcExtent; - VkExtent2D maxSrcExtent; - VkOffset2D minDstPosition; - VkOffset2D maxDstPosition; - VkExtent2D minDstExtent; - VkExtent2D maxDstExtent; -} VkDisplayPlaneCapabilitiesKHR; - -typedef struct VkDisplayPlanePropertiesKHR { - VkDisplayKHR currentDisplay; - uint32_t currentStackIndex; -} VkDisplayPlanePropertiesKHR; - -typedef struct VkDisplaySurfaceCreateInfoKHR { - VkStructureType sType; - const void* pNext; - VkDisplaySurfaceCreateFlagsKHR flags; - VkDisplayModeKHR displayMode; - uint32_t planeIndex; - uint32_t planeStackIndex; - VkSurfaceTransformFlagBitsKHR transform; - float globalAlpha; - VkDisplayPlaneAlphaFlagBitsKHR alphaMode; - VkExtent2D imageExtent; -} VkDisplaySurfaceCreateInfoKHR; - - -typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceDisplayPropertiesKHR)(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPropertiesKHR* pProperties); -typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR)(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPlanePropertiesKHR* pProperties); -typedef VkResult (VKAPI_PTR *PFN_vkGetDisplayPlaneSupportedDisplaysKHR)(VkPhysicalDevice physicalDevice, uint32_t planeIndex, uint32_t* pDisplayCount, VkDisplayKHR* pDisplays); -typedef VkResult (VKAPI_PTR *PFN_vkGetDisplayModePropertiesKHR)(VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t* pPropertyCount, VkDisplayModePropertiesKHR* pProperties); -typedef VkResult (VKAPI_PTR *PFN_vkCreateDisplayModeKHR)(VkPhysicalDevice physicalDevice, VkDisplayKHR display, const VkDisplayModeCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDisplayModeKHR* pMode); -typedef VkResult (VKAPI_PTR *PFN_vkGetDisplayPlaneCapabilitiesKHR)(VkPhysicalDevice physicalDevice, VkDisplayModeKHR mode, uint32_t planeIndex, VkDisplayPlaneCapabilitiesKHR* pCapabilities); -typedef VkResult (VKAPI_PTR *PFN_vkCreateDisplayPlaneSurfaceKHR)(VkInstance instance, const VkDisplaySurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceDisplayPropertiesKHR( - VkPhysicalDevice physicalDevice, - uint32_t* pPropertyCount, - VkDisplayPropertiesKHR* pProperties); - -VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceDisplayPlanePropertiesKHR( - VkPhysicalDevice physicalDevice, - uint32_t* pPropertyCount, - VkDisplayPlanePropertiesKHR* pProperties); - -VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayPlaneSupportedDisplaysKHR( - VkPhysicalDevice physicalDevice, - uint32_t planeIndex, - uint32_t* pDisplayCount, - VkDisplayKHR* pDisplays); - -VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayModePropertiesKHR( - VkPhysicalDevice physicalDevice, - VkDisplayKHR display, - 
uint32_t* pPropertyCount, - VkDisplayModePropertiesKHR* pProperties); - -VKAPI_ATTR VkResult VKAPI_CALL vkCreateDisplayModeKHR( - VkPhysicalDevice physicalDevice, - VkDisplayKHR display, - const VkDisplayModeCreateInfoKHR* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkDisplayModeKHR* pMode); - -VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayPlaneCapabilitiesKHR( - VkPhysicalDevice physicalDevice, - VkDisplayModeKHR mode, - uint32_t planeIndex, - VkDisplayPlaneCapabilitiesKHR* pCapabilities); - -VKAPI_ATTR VkResult VKAPI_CALL vkCreateDisplayPlaneSurfaceKHR( - VkInstance instance, - const VkDisplaySurfaceCreateInfoKHR* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkSurfaceKHR* pSurface); -#endif - -#define VK_KHR_display_swapchain 1 -#define VK_KHR_DISPLAY_SWAPCHAIN_SPEC_VERSION 9 -#define VK_KHR_DISPLAY_SWAPCHAIN_EXTENSION_NAME "VK_KHR_display_swapchain" - -typedef struct VkDisplayPresentInfoKHR { - VkStructureType sType; - const void* pNext; - VkRect2D srcRect; - VkRect2D dstRect; - VkBool32 persistent; -} VkDisplayPresentInfoKHR; - - -typedef VkResult (VKAPI_PTR *PFN_vkCreateSharedSwapchainsKHR)(VkDevice device, uint32_t swapchainCount, const VkSwapchainCreateInfoKHR* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchains); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkCreateSharedSwapchainsKHR( - VkDevice device, - uint32_t swapchainCount, - const VkSwapchainCreateInfoKHR* pCreateInfos, - const VkAllocationCallbacks* pAllocator, - VkSwapchainKHR* pSwapchains); -#endif - -#ifdef VK_USE_PLATFORM_XLIB_KHR -#define VK_KHR_xlib_surface 1 -#include - -#define VK_KHR_XLIB_SURFACE_SPEC_VERSION 6 -#define VK_KHR_XLIB_SURFACE_EXTENSION_NAME "VK_KHR_xlib_surface" - -typedef VkFlags VkXlibSurfaceCreateFlagsKHR; - -typedef struct VkXlibSurfaceCreateInfoKHR { - VkStructureType sType; - const void* pNext; - VkXlibSurfaceCreateFlagsKHR flags; - Display* dpy; - Window window; -} VkXlibSurfaceCreateInfoKHR; - - -typedef VkResult (VKAPI_PTR *PFN_vkCreateXlibSurfaceKHR)(VkInstance instance, const VkXlibSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); -typedef VkBool32 (VKAPI_PTR *PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, Display* dpy, VisualID visualID); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkCreateXlibSurfaceKHR( - VkInstance instance, - const VkXlibSurfaceCreateInfoKHR* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkSurfaceKHR* pSurface); - -VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceXlibPresentationSupportKHR( - VkPhysicalDevice physicalDevice, - uint32_t queueFamilyIndex, - Display* dpy, - VisualID visualID); -#endif -#endif /* VK_USE_PLATFORM_XLIB_KHR */ - -#ifdef VK_USE_PLATFORM_XCB_KHR -#define VK_KHR_xcb_surface 1 -#include - -#define VK_KHR_XCB_SURFACE_SPEC_VERSION 6 -#define VK_KHR_XCB_SURFACE_EXTENSION_NAME "VK_KHR_xcb_surface" - -typedef VkFlags VkXcbSurfaceCreateFlagsKHR; - -typedef struct VkXcbSurfaceCreateInfoKHR { - VkStructureType sType; - const void* pNext; - VkXcbSurfaceCreateFlagsKHR flags; - xcb_connection_t* connection; - xcb_window_t window; -} VkXcbSurfaceCreateInfoKHR; - - -typedef VkResult (VKAPI_PTR *PFN_vkCreateXcbSurfaceKHR)(VkInstance instance, const VkXcbSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); -typedef VkBool32 (VKAPI_PTR *PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR)(VkPhysicalDevice 
physicalDevice, uint32_t queueFamilyIndex, xcb_connection_t* connection, xcb_visualid_t visual_id); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkCreateXcbSurfaceKHR( - VkInstance instance, - const VkXcbSurfaceCreateInfoKHR* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkSurfaceKHR* pSurface); - -VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceXcbPresentationSupportKHR( - VkPhysicalDevice physicalDevice, - uint32_t queueFamilyIndex, - xcb_connection_t* connection, - xcb_visualid_t visual_id); -#endif -#endif /* VK_USE_PLATFORM_XCB_KHR */ - -#ifdef VK_USE_PLATFORM_WAYLAND_KHR -#define VK_KHR_wayland_surface 1 -#include - -#define VK_KHR_WAYLAND_SURFACE_SPEC_VERSION 6 -#define VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME "VK_KHR_wayland_surface" - -typedef VkFlags VkWaylandSurfaceCreateFlagsKHR; - -typedef struct VkWaylandSurfaceCreateInfoKHR { - VkStructureType sType; - const void* pNext; - VkWaylandSurfaceCreateFlagsKHR flags; - struct wl_display* display; - struct wl_surface* surface; -} VkWaylandSurfaceCreateInfoKHR; - - -typedef VkResult (VKAPI_PTR *PFN_vkCreateWaylandSurfaceKHR)(VkInstance instance, const VkWaylandSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); -typedef VkBool32 (VKAPI_PTR *PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, struct wl_display* display); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkCreateWaylandSurfaceKHR( - VkInstance instance, - const VkWaylandSurfaceCreateInfoKHR* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkSurfaceKHR* pSurface); - -VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceWaylandPresentationSupportKHR( - VkPhysicalDevice physicalDevice, - uint32_t queueFamilyIndex, - struct wl_display* display); -#endif -#endif /* VK_USE_PLATFORM_WAYLAND_KHR */ - -#ifdef VK_USE_PLATFORM_MIR_KHR -#define VK_KHR_mir_surface 1 -#include - -#define VK_KHR_MIR_SURFACE_SPEC_VERSION 4 -#define VK_KHR_MIR_SURFACE_EXTENSION_NAME "VK_KHR_mir_surface" - -typedef VkFlags VkMirSurfaceCreateFlagsKHR; - -typedef struct VkMirSurfaceCreateInfoKHR { - VkStructureType sType; - const void* pNext; - VkMirSurfaceCreateFlagsKHR flags; - MirConnection* connection; - MirSurface* mirSurface; -} VkMirSurfaceCreateInfoKHR; - - -typedef VkResult (VKAPI_PTR *PFN_vkCreateMirSurfaceKHR)(VkInstance instance, const VkMirSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); -typedef VkBool32 (VKAPI_PTR *PFN_vkGetPhysicalDeviceMirPresentationSupportKHR)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, MirConnection* connection); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkCreateMirSurfaceKHR( - VkInstance instance, - const VkMirSurfaceCreateInfoKHR* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkSurfaceKHR* pSurface); - -VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceMirPresentationSupportKHR( - VkPhysicalDevice physicalDevice, - uint32_t queueFamilyIndex, - MirConnection* connection); -#endif -#endif /* VK_USE_PLATFORM_MIR_KHR */ +#include "vk_platform.h" +#include "vulkan_core.h" #ifdef VK_USE_PLATFORM_ANDROID_KHR -#define VK_KHR_android_surface 1 -#include - -#define VK_KHR_ANDROID_SURFACE_SPEC_VERSION 6 -#define VK_KHR_ANDROID_SURFACE_EXTENSION_NAME "VK_KHR_android_surface" - -typedef VkFlags VkAndroidSurfaceCreateFlagsKHR; - -typedef struct VkAndroidSurfaceCreateInfoKHR { - VkStructureType sType; - const void* pNext; - 
VkAndroidSurfaceCreateFlagsKHR flags; - ANativeWindow* window; -} VkAndroidSurfaceCreateInfoKHR; - - -typedef VkResult (VKAPI_PTR *PFN_vkCreateAndroidSurfaceKHR)(VkInstance instance, const VkAndroidSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkCreateAndroidSurfaceKHR( - VkInstance instance, - const VkAndroidSurfaceCreateInfoKHR* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkSurfaceKHR* pSurface); -#endif -#endif /* VK_USE_PLATFORM_ANDROID_KHR */ - -#ifdef VK_USE_PLATFORM_WIN32_KHR -#define VK_KHR_win32_surface 1 -#include - -#define VK_KHR_WIN32_SURFACE_SPEC_VERSION 6 -#define VK_KHR_WIN32_SURFACE_EXTENSION_NAME "VK_KHR_win32_surface" - -typedef VkFlags VkWin32SurfaceCreateFlagsKHR; - -typedef struct VkWin32SurfaceCreateInfoKHR { - VkStructureType sType; - const void* pNext; - VkWin32SurfaceCreateFlagsKHR flags; - HINSTANCE hinstance; - HWND hwnd; -} VkWin32SurfaceCreateInfoKHR; - - -typedef VkResult (VKAPI_PTR *PFN_vkCreateWin32SurfaceKHR)(VkInstance instance, const VkWin32SurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); -typedef VkBool32 (VKAPI_PTR *PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkCreateWin32SurfaceKHR( - VkInstance instance, - const VkWin32SurfaceCreateInfoKHR* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkSurfaceKHR* pSurface); - -VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceWin32PresentationSupportKHR( - VkPhysicalDevice physicalDevice, - uint32_t queueFamilyIndex); -#endif -#endif /* VK_USE_PLATFORM_WIN32_KHR */ - -#define VK_KHR_sampler_mirror_clamp_to_edge 1 -#define VK_KHR_SAMPLER_MIRROR_CLAMP_TO_EDGE_SPEC_VERSION 1 -#define VK_KHR_SAMPLER_MIRROR_CLAMP_TO_EDGE_EXTENSION_NAME "VK_KHR_sampler_mirror_clamp_to_edge" - - -#define VK_KHR_get_physical_device_properties2 1 -#define VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_SPEC_VERSION 1 -#define VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME "VK_KHR_get_physical_device_properties2" - -typedef struct VkPhysicalDeviceFeatures2KHR { - VkStructureType sType; - void* pNext; - VkPhysicalDeviceFeatures features; -} VkPhysicalDeviceFeatures2KHR; - -typedef struct VkPhysicalDeviceProperties2KHR { - VkStructureType sType; - void* pNext; - VkPhysicalDeviceProperties properties; -} VkPhysicalDeviceProperties2KHR; - -typedef struct VkFormatProperties2KHR { - VkStructureType sType; - void* pNext; - VkFormatProperties formatProperties; -} VkFormatProperties2KHR; - -typedef struct VkImageFormatProperties2KHR { - VkStructureType sType; - void* pNext; - VkImageFormatProperties imageFormatProperties; -} VkImageFormatProperties2KHR; - -typedef struct VkPhysicalDeviceImageFormatInfo2KHR { - VkStructureType sType; - const void* pNext; - VkFormat format; - VkImageType type; - VkImageTiling tiling; - VkImageUsageFlags usage; - VkImageCreateFlags flags; -} VkPhysicalDeviceImageFormatInfo2KHR; - -typedef struct VkQueueFamilyProperties2KHR { - VkStructureType sType; - void* pNext; - VkQueueFamilyProperties queueFamilyProperties; -} VkQueueFamilyProperties2KHR; - -typedef struct VkPhysicalDeviceMemoryProperties2KHR { - VkStructureType sType; - void* pNext; - VkPhysicalDeviceMemoryProperties memoryProperties; -} VkPhysicalDeviceMemoryProperties2KHR; - -typedef struct VkSparseImageFormatProperties2KHR { - VkStructureType 
sType; - void* pNext; - VkSparseImageFormatProperties properties; -} VkSparseImageFormatProperties2KHR; - -typedef struct VkPhysicalDeviceSparseImageFormatInfo2KHR { - VkStructureType sType; - const void* pNext; - VkFormat format; - VkImageType type; - VkSampleCountFlagBits samples; - VkImageUsageFlags usage; - VkImageTiling tiling; -} VkPhysicalDeviceSparseImageFormatInfo2KHR; - - -typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceFeatures2KHR)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2KHR* pFeatures); -typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceProperties2KHR)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2KHR* pProperties); -typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceFormatProperties2KHR)(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties2KHR* pFormatProperties); -typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceImageFormatProperties2KHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2KHR* pImageFormatInfo, VkImageFormatProperties2KHR* pImageFormatProperties); -typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR)(VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties2KHR* pQueueFamilyProperties); -typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceMemoryProperties2KHR)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2KHR* pMemoryProperties); -typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSparseImageFormatInfo2KHR* pFormatInfo, uint32_t* pPropertyCount, VkSparseImageFormatProperties2KHR* pProperties); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFeatures2KHR( - VkPhysicalDevice physicalDevice, - VkPhysicalDeviceFeatures2KHR* pFeatures); - -VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceProperties2KHR( - VkPhysicalDevice physicalDevice, - VkPhysicalDeviceProperties2KHR* pProperties); - -VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFormatProperties2KHR( - VkPhysicalDevice physicalDevice, - VkFormat format, - VkFormatProperties2KHR* pFormatProperties); - -VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceImageFormatProperties2KHR( - VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceImageFormatInfo2KHR* pImageFormatInfo, - VkImageFormatProperties2KHR* pImageFormatProperties); - -VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceQueueFamilyProperties2KHR( - VkPhysicalDevice physicalDevice, - uint32_t* pQueueFamilyPropertyCount, - VkQueueFamilyProperties2KHR* pQueueFamilyProperties); - -VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceMemoryProperties2KHR( - VkPhysicalDevice physicalDevice, - VkPhysicalDeviceMemoryProperties2KHR* pMemoryProperties); - -VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceSparseImageFormatProperties2KHR( - VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceSparseImageFormatInfo2KHR* pFormatInfo, - uint32_t* pPropertyCount, - VkSparseImageFormatProperties2KHR* pProperties); +#include "vulkan_android.h" #endif -#define VK_KHR_shader_draw_parameters 1 -#define VK_KHR_SHADER_DRAW_PARAMETERS_SPEC_VERSION 1 -#define VK_KHR_SHADER_DRAW_PARAMETERS_EXTENSION_NAME "VK_KHR_shader_draw_parameters" - - -#define VK_KHR_maintenance1 1 -#define VK_KHR_MAINTENANCE1_SPEC_VERSION 1 -#define VK_KHR_MAINTENANCE1_EXTENSION_NAME "VK_KHR_maintenance1" - -typedef VkFlags VkCommandPoolTrimFlagsKHR; - -typedef void (VKAPI_PTR *PFN_vkTrimCommandPoolKHR)(VkDevice device, VkCommandPool commandPool, 
VkCommandPoolTrimFlagsKHR flags); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkTrimCommandPoolKHR( - VkDevice device, - VkCommandPool commandPool, - VkCommandPoolTrimFlagsKHR flags); -#endif - -#define VK_KHR_external_memory_capabilities 1 -#define VK_LUID_SIZE_KHR 8 -#define VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_SPEC_VERSION 1 -#define VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME "VK_KHR_external_memory_capabilities" - - -typedef enum VkExternalMemoryHandleTypeFlagBitsKHR { - VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT_KHR = 0x00000001, - VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR = 0x00000002, - VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_KHR = 0x00000004, - VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT_KHR = 0x00000008, - VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT_KHR = 0x00000010, - VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT_KHR = 0x00000020, - VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT_KHR = 0x00000040, - VK_EXTERNAL_MEMORY_HANDLE_TYPE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF -} VkExternalMemoryHandleTypeFlagBitsKHR; -typedef VkFlags VkExternalMemoryHandleTypeFlagsKHR; - -typedef enum VkExternalMemoryFeatureFlagBitsKHR { - VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT_KHR = 0x00000001, - VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_KHR = 0x00000002, - VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_KHR = 0x00000004, - VK_EXTERNAL_MEMORY_FEATURE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF -} VkExternalMemoryFeatureFlagBitsKHR; -typedef VkFlags VkExternalMemoryFeatureFlagsKHR; - -typedef struct VkExternalMemoryPropertiesKHR { - VkExternalMemoryFeatureFlagsKHR externalMemoryFeatures; - VkExternalMemoryHandleTypeFlagsKHR exportFromImportedHandleTypes; - VkExternalMemoryHandleTypeFlagsKHR compatibleHandleTypes; -} VkExternalMemoryPropertiesKHR; - -typedef struct VkPhysicalDeviceExternalImageFormatInfoKHR { - VkStructureType sType; - const void* pNext; - VkExternalMemoryHandleTypeFlagBitsKHR handleType; -} VkPhysicalDeviceExternalImageFormatInfoKHR; - -typedef struct VkExternalImageFormatPropertiesKHR { - VkStructureType sType; - void* pNext; - VkExternalMemoryPropertiesKHR externalMemoryProperties; -} VkExternalImageFormatPropertiesKHR; - -typedef struct VkPhysicalDeviceExternalBufferInfoKHR { - VkStructureType sType; - const void* pNext; - VkBufferCreateFlags flags; - VkBufferUsageFlags usage; - VkExternalMemoryHandleTypeFlagBitsKHR handleType; -} VkPhysicalDeviceExternalBufferInfoKHR; - -typedef struct VkExternalBufferPropertiesKHR { - VkStructureType sType; - void* pNext; - VkExternalMemoryPropertiesKHR externalMemoryProperties; -} VkExternalBufferPropertiesKHR; - -typedef struct VkPhysicalDeviceIDPropertiesKHR { - VkStructureType sType; - void* pNext; - uint8_t deviceUUID[VK_UUID_SIZE]; - uint8_t driverUUID[VK_UUID_SIZE]; - uint8_t deviceLUID[VK_LUID_SIZE_KHR]; - uint32_t deviceNodeMask; - VkBool32 deviceLUIDValid; -} VkPhysicalDeviceIDPropertiesKHR; - - -typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfoKHR* pExternalBufferInfo, VkExternalBufferPropertiesKHR* pExternalBufferProperties); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalBufferPropertiesKHR( - VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceExternalBufferInfoKHR* pExternalBufferInfo, - VkExternalBufferPropertiesKHR* pExternalBufferProperties); -#endif - -#define VK_KHR_external_memory 1 -#define VK_KHR_EXTERNAL_MEMORY_SPEC_VERSION 1 -#define 
VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME "VK_KHR_external_memory" -#define VK_QUEUE_FAMILY_EXTERNAL_KHR (~0U-1) - -typedef struct VkExternalMemoryImageCreateInfoKHR { - VkStructureType sType; - const void* pNext; - VkExternalMemoryHandleTypeFlagsKHR handleTypes; -} VkExternalMemoryImageCreateInfoKHR; - -typedef struct VkExternalMemoryBufferCreateInfoKHR { - VkStructureType sType; - const void* pNext; - VkExternalMemoryHandleTypeFlagsKHR handleTypes; -} VkExternalMemoryBufferCreateInfoKHR; - -typedef struct VkExportMemoryAllocateInfoKHR { - VkStructureType sType; - const void* pNext; - VkExternalMemoryHandleTypeFlagsKHR handleTypes; -} VkExportMemoryAllocateInfoKHR; - - - -#ifdef VK_USE_PLATFORM_WIN32_KHR -#define VK_KHR_external_memory_win32 1 -#define VK_KHR_EXTERNAL_MEMORY_WIN32_SPEC_VERSION 1 -#define VK_KHR_EXTERNAL_MEMORY_WIN32_EXTENSION_NAME "VK_KHR_external_memory_win32" - -typedef struct VkImportMemoryWin32HandleInfoKHR { - VkStructureType sType; - const void* pNext; - VkExternalMemoryHandleTypeFlagBitsKHR handleType; - HANDLE handle; - LPCWSTR name; -} VkImportMemoryWin32HandleInfoKHR; - -typedef struct VkExportMemoryWin32HandleInfoKHR { - VkStructureType sType; - const void* pNext; - const SECURITY_ATTRIBUTES* pAttributes; - DWORD dwAccess; - LPCWSTR name; -} VkExportMemoryWin32HandleInfoKHR; - -typedef struct VkMemoryWin32HandlePropertiesKHR { - VkStructureType sType; - void* pNext; - uint32_t memoryTypeBits; -} VkMemoryWin32HandlePropertiesKHR; - -typedef struct VkMemoryGetWin32HandleInfoKHR { - VkStructureType sType; - const void* pNext; - VkDeviceMemory memory; - VkExternalMemoryHandleTypeFlagBitsKHR handleType; -} VkMemoryGetWin32HandleInfoKHR; - - -typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryWin32HandleKHR)(VkDevice device, const VkMemoryGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle); -typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryWin32HandlePropertiesKHR)(VkDevice device, VkExternalMemoryHandleTypeFlagBitsKHR handleType, HANDLE handle, VkMemoryWin32HandlePropertiesKHR* pMemoryWin32HandleProperties); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryWin32HandleKHR( - VkDevice device, - const VkMemoryGetWin32HandleInfoKHR* pGetWin32HandleInfo, - HANDLE* pHandle); - -VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryWin32HandlePropertiesKHR( - VkDevice device, - VkExternalMemoryHandleTypeFlagBitsKHR handleType, - HANDLE handle, - VkMemoryWin32HandlePropertiesKHR* pMemoryWin32HandleProperties); -#endif -#endif /* VK_USE_PLATFORM_WIN32_KHR */ - -#define VK_KHR_external_memory_fd 1 -#define VK_KHR_EXTERNAL_MEMORY_FD_SPEC_VERSION 1 -#define VK_KHR_EXTERNAL_MEMORY_FD_EXTENSION_NAME "VK_KHR_external_memory_fd" - -typedef struct VkImportMemoryFdInfoKHR { - VkStructureType sType; - const void* pNext; - VkExternalMemoryHandleTypeFlagBitsKHR handleType; - int fd; -} VkImportMemoryFdInfoKHR; - -typedef struct VkMemoryFdPropertiesKHR { - VkStructureType sType; - void* pNext; - uint32_t memoryTypeBits; -} VkMemoryFdPropertiesKHR; - -typedef struct VkMemoryGetFdInfoKHR { - VkStructureType sType; - const void* pNext; - VkDeviceMemory memory; - VkExternalMemoryHandleTypeFlagBitsKHR handleType; -} VkMemoryGetFdInfoKHR; - - -typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryFdKHR)(VkDevice device, const VkMemoryGetFdInfoKHR* pGetFdInfo, int* pFd); -typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryFdPropertiesKHR)(VkDevice device, VkExternalMemoryHandleTypeFlagBitsKHR handleType, int fd, VkMemoryFdPropertiesKHR* pMemoryFdProperties); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult 
VKAPI_CALL vkGetMemoryFdKHR( - VkDevice device, - const VkMemoryGetFdInfoKHR* pGetFdInfo, - int* pFd); - -VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryFdPropertiesKHR( - VkDevice device, - VkExternalMemoryHandleTypeFlagBitsKHR handleType, - int fd, - VkMemoryFdPropertiesKHR* pMemoryFdProperties); -#endif - -#ifdef VK_USE_PLATFORM_WIN32_KHR -#define VK_KHR_win32_keyed_mutex 1 -#define VK_KHR_WIN32_KEYED_MUTEX_SPEC_VERSION 1 -#define VK_KHR_WIN32_KEYED_MUTEX_EXTENSION_NAME "VK_KHR_win32_keyed_mutex" - -typedef struct VkWin32KeyedMutexAcquireReleaseInfoKHR { - VkStructureType sType; - const void* pNext; - uint32_t acquireCount; - const VkDeviceMemory* pAcquireSyncs; - const uint64_t* pAcquireKeys; - const uint32_t* pAcquireTimeouts; - uint32_t releaseCount; - const VkDeviceMemory* pReleaseSyncs; - const uint64_t* pReleaseKeys; -} VkWin32KeyedMutexAcquireReleaseInfoKHR; - - -#endif /* VK_USE_PLATFORM_WIN32_KHR */ - -#define VK_KHR_external_semaphore_capabilities 1 -#define VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_SPEC_VERSION 1 -#define VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_EXTENSION_NAME "VK_KHR_external_semaphore_capabilities" - - -typedef enum VkExternalSemaphoreHandleTypeFlagBitsKHR { - VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT_KHR = 0x00000001, - VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR = 0x00000002, - VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_KHR = 0x00000004, - VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT_KHR = 0x00000008, - VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT_KHR = 0x00000010, - VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF -} VkExternalSemaphoreHandleTypeFlagBitsKHR; -typedef VkFlags VkExternalSemaphoreHandleTypeFlagsKHR; - -typedef enum VkExternalSemaphoreFeatureFlagBitsKHR { - VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT_KHR = 0x00000001, - VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT_KHR = 0x00000002, - VK_EXTERNAL_SEMAPHORE_FEATURE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF -} VkExternalSemaphoreFeatureFlagBitsKHR; -typedef VkFlags VkExternalSemaphoreFeatureFlagsKHR; - -typedef struct VkPhysicalDeviceExternalSemaphoreInfoKHR { - VkStructureType sType; - const void* pNext; - VkExternalSemaphoreHandleTypeFlagBitsKHR handleType; -} VkPhysicalDeviceExternalSemaphoreInfoKHR; - -typedef struct VkExternalSemaphorePropertiesKHR { - VkStructureType sType; - void* pNext; - VkExternalSemaphoreHandleTypeFlagsKHR exportFromImportedHandleTypes; - VkExternalSemaphoreHandleTypeFlagsKHR compatibleHandleTypes; - VkExternalSemaphoreFeatureFlagsKHR externalSemaphoreFeatures; -} VkExternalSemaphorePropertiesKHR; - - -typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalSemaphoreInfoKHR* pExternalSemaphoreInfo, VkExternalSemaphorePropertiesKHR* pExternalSemaphoreProperties); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( - VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceExternalSemaphoreInfoKHR* pExternalSemaphoreInfo, - VkExternalSemaphorePropertiesKHR* pExternalSemaphoreProperties); -#endif - -#define VK_KHR_external_semaphore 1 -#define VK_KHR_EXTERNAL_SEMAPHORE_SPEC_VERSION 1 -#define VK_KHR_EXTERNAL_SEMAPHORE_EXTENSION_NAME "VK_KHR_external_semaphore" - - -typedef enum VkSemaphoreImportFlagBitsKHR { - VK_SEMAPHORE_IMPORT_TEMPORARY_BIT_KHR = 0x00000001, - VK_SEMAPHORE_IMPORT_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF -} VkSemaphoreImportFlagBitsKHR; -typedef VkFlags 
VkSemaphoreImportFlagsKHR; - -typedef struct VkExportSemaphoreCreateInfoKHR { - VkStructureType sType; - const void* pNext; - VkExternalSemaphoreHandleTypeFlagsKHR handleTypes; -} VkExportSemaphoreCreateInfoKHR; - - - -#ifdef VK_USE_PLATFORM_WIN32_KHR -#define VK_KHR_external_semaphore_win32 1 -#define VK_KHR_EXTERNAL_SEMAPHORE_WIN32_SPEC_VERSION 1 -#define VK_KHR_EXTERNAL_SEMAPHORE_WIN32_EXTENSION_NAME "VK_KHR_external_semaphore_win32" - -typedef struct VkImportSemaphoreWin32HandleInfoKHR { - VkStructureType sType; - const void* pNext; - VkSemaphore semaphore; - VkSemaphoreImportFlagsKHR flags; - VkExternalSemaphoreHandleTypeFlagBitsKHR handleType; - HANDLE handle; - LPCWSTR name; -} VkImportSemaphoreWin32HandleInfoKHR; - -typedef struct VkExportSemaphoreWin32HandleInfoKHR { - VkStructureType sType; - const void* pNext; - const SECURITY_ATTRIBUTES* pAttributes; - DWORD dwAccess; - LPCWSTR name; -} VkExportSemaphoreWin32HandleInfoKHR; - -typedef struct VkD3D12FenceSubmitInfoKHR { - VkStructureType sType; - const void* pNext; - uint32_t waitSemaphoreValuesCount; - const uint64_t* pWaitSemaphoreValues; - uint32_t signalSemaphoreValuesCount; - const uint64_t* pSignalSemaphoreValues; -} VkD3D12FenceSubmitInfoKHR; - -typedef struct VkSemaphoreGetWin32HandleInfoKHR { - VkStructureType sType; - const void* pNext; - VkSemaphore semaphore; - VkExternalSemaphoreHandleTypeFlagBitsKHR handleType; -} VkSemaphoreGetWin32HandleInfoKHR; - - -typedef VkResult (VKAPI_PTR *PFN_vkImportSemaphoreWin32HandleKHR)(VkDevice device, const VkImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo); -typedef VkResult (VKAPI_PTR *PFN_vkGetSemaphoreWin32HandleKHR)(VkDevice device, const VkSemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkImportSemaphoreWin32HandleKHR( - VkDevice device, - const VkImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo); - -VKAPI_ATTR VkResult VKAPI_CALL vkGetSemaphoreWin32HandleKHR( - VkDevice device, - const VkSemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo, - HANDLE* pHandle); -#endif -#endif /* VK_USE_PLATFORM_WIN32_KHR */ - -#define VK_KHR_external_semaphore_fd 1 -#define VK_KHR_EXTERNAL_SEMAPHORE_FD_SPEC_VERSION 1 -#define VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME "VK_KHR_external_semaphore_fd" - -typedef struct VkImportSemaphoreFdInfoKHR { - VkStructureType sType; - const void* pNext; - VkSemaphore semaphore; - VkSemaphoreImportFlagsKHR flags; - VkExternalSemaphoreHandleTypeFlagBitsKHR handleType; - int fd; -} VkImportSemaphoreFdInfoKHR; - -typedef struct VkSemaphoreGetFdInfoKHR { - VkStructureType sType; - const void* pNext; - VkSemaphore semaphore; - VkExternalSemaphoreHandleTypeFlagBitsKHR handleType; -} VkSemaphoreGetFdInfoKHR; - - -typedef VkResult (VKAPI_PTR *PFN_vkImportSemaphoreFdKHR)(VkDevice device, const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo); -typedef VkResult (VKAPI_PTR *PFN_vkGetSemaphoreFdKHR)(VkDevice device, const VkSemaphoreGetFdInfoKHR* pGetFdInfo, int* pFd); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkImportSemaphoreFdKHR( - VkDevice device, - const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo); - -VKAPI_ATTR VkResult VKAPI_CALL vkGetSemaphoreFdKHR( - VkDevice device, - const VkSemaphoreGetFdInfoKHR* pGetFdInfo, - int* pFd); -#endif - -#define VK_KHR_push_descriptor 1 -#define VK_KHR_PUSH_DESCRIPTOR_SPEC_VERSION 1 -#define VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME "VK_KHR_push_descriptor" - -typedef struct 
VkPhysicalDevicePushDescriptorPropertiesKHR { - VkStructureType sType; - void* pNext; - uint32_t maxPushDescriptors; -} VkPhysicalDevicePushDescriptorPropertiesKHR; - - -typedef void (VKAPI_PTR *PFN_vkCmdPushDescriptorSetKHR)(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkCmdPushDescriptorSetKHR( - VkCommandBuffer commandBuffer, - VkPipelineBindPoint pipelineBindPoint, - VkPipelineLayout layout, - uint32_t set, - uint32_t descriptorWriteCount, - const VkWriteDescriptorSet* pDescriptorWrites); -#endif - -#define VK_KHR_16bit_storage 1 -#define VK_KHR_16BIT_STORAGE_SPEC_VERSION 1 -#define VK_KHR_16BIT_STORAGE_EXTENSION_NAME "VK_KHR_16bit_storage" - -typedef struct VkPhysicalDevice16BitStorageFeaturesKHR { - VkStructureType sType; - void* pNext; - VkBool32 storageBuffer16BitAccess; - VkBool32 uniformAndStorageBuffer16BitAccess; - VkBool32 storagePushConstant16; - VkBool32 storageInputOutput16; -} VkPhysicalDevice16BitStorageFeaturesKHR; - - - -#define VK_KHR_incremental_present 1 -#define VK_KHR_INCREMENTAL_PRESENT_SPEC_VERSION 1 -#define VK_KHR_INCREMENTAL_PRESENT_EXTENSION_NAME "VK_KHR_incremental_present" - -typedef struct VkRectLayerKHR { - VkOffset2D offset; - VkExtent2D extent; - uint32_t layer; -} VkRectLayerKHR; - -typedef struct VkPresentRegionKHR { - uint32_t rectangleCount; - const VkRectLayerKHR* pRectangles; -} VkPresentRegionKHR; - -typedef struct VkPresentRegionsKHR { - VkStructureType sType; - const void* pNext; - uint32_t swapchainCount; - const VkPresentRegionKHR* pRegions; -} VkPresentRegionsKHR; - - - -#define VK_KHR_descriptor_update_template 1 -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDescriptorUpdateTemplateKHR) - -#define VK_KHR_DESCRIPTOR_UPDATE_TEMPLATE_SPEC_VERSION 1 -#define VK_KHR_DESCRIPTOR_UPDATE_TEMPLATE_EXTENSION_NAME "VK_KHR_descriptor_update_template" - - -typedef enum VkDescriptorUpdateTemplateTypeKHR { - VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET_KHR = 0, - VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR = 1, - VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_BEGIN_RANGE_KHR = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET_KHR, - VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_END_RANGE_KHR = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR, - VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_RANGE_SIZE_KHR = (VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR - VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET_KHR + 1), - VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_MAX_ENUM_KHR = 0x7FFFFFFF -} VkDescriptorUpdateTemplateTypeKHR; - -typedef VkFlags VkDescriptorUpdateTemplateCreateFlagsKHR; - -typedef struct VkDescriptorUpdateTemplateEntryKHR { - uint32_t dstBinding; - uint32_t dstArrayElement; - uint32_t descriptorCount; - VkDescriptorType descriptorType; - size_t offset; - size_t stride; -} VkDescriptorUpdateTemplateEntryKHR; - -typedef struct VkDescriptorUpdateTemplateCreateInfoKHR { - VkStructureType sType; - void* pNext; - VkDescriptorUpdateTemplateCreateFlagsKHR flags; - uint32_t descriptorUpdateEntryCount; - const VkDescriptorUpdateTemplateEntryKHR* pDescriptorUpdateEntries; - VkDescriptorUpdateTemplateTypeKHR templateType; - VkDescriptorSetLayout descriptorSetLayout; - VkPipelineBindPoint pipelineBindPoint; - VkPipelineLayout pipelineLayout; - uint32_t set; -} VkDescriptorUpdateTemplateCreateInfoKHR; - - -typedef VkResult (VKAPI_PTR 
*PFN_vkCreateDescriptorUpdateTemplateKHR)(VkDevice device, const VkDescriptorUpdateTemplateCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorUpdateTemplateKHR* pDescriptorUpdateTemplate); -typedef void (VKAPI_PTR *PFN_vkDestroyDescriptorUpdateTemplateKHR)(VkDevice device, VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate, const VkAllocationCallbacks* pAllocator); -typedef void (VKAPI_PTR *PFN_vkUpdateDescriptorSetWithTemplateKHR)(VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate, const void* pData); -typedef void (VKAPI_PTR *PFN_vkCmdPushDescriptorSetWithTemplateKHR)(VkCommandBuffer commandBuffer, VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate, VkPipelineLayout layout, uint32_t set, const void* pData); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkCreateDescriptorUpdateTemplateKHR( - VkDevice device, - const VkDescriptorUpdateTemplateCreateInfoKHR* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkDescriptorUpdateTemplateKHR* pDescriptorUpdateTemplate); - -VKAPI_ATTR void VKAPI_CALL vkDestroyDescriptorUpdateTemplateKHR( - VkDevice device, - VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate, - const VkAllocationCallbacks* pAllocator); - -VKAPI_ATTR void VKAPI_CALL vkUpdateDescriptorSetWithTemplateKHR( - VkDevice device, - VkDescriptorSet descriptorSet, - VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate, - const void* pData); - -VKAPI_ATTR void VKAPI_CALL vkCmdPushDescriptorSetWithTemplateKHR( - VkCommandBuffer commandBuffer, - VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate, - VkPipelineLayout layout, - uint32_t set, - const void* pData); -#endif - -#define VK_KHR_shared_presentable_image 1 -#define VK_KHR_SHARED_PRESENTABLE_IMAGE_SPEC_VERSION 1 -#define VK_KHR_SHARED_PRESENTABLE_IMAGE_EXTENSION_NAME "VK_KHR_shared_presentable_image" - -typedef struct VkSharedPresentSurfaceCapabilitiesKHR { - VkStructureType sType; - void* pNext; - VkImageUsageFlags sharedPresentSupportedUsageFlags; -} VkSharedPresentSurfaceCapabilitiesKHR; - - -typedef VkResult (VKAPI_PTR *PFN_vkGetSwapchainStatusKHR)(VkDevice device, VkSwapchainKHR swapchain); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkGetSwapchainStatusKHR( - VkDevice device, - VkSwapchainKHR swapchain); -#endif - -#define VK_KHR_external_fence_capabilities 1 -#define VK_KHR_EXTERNAL_FENCE_CAPABILITIES_SPEC_VERSION 1 -#define VK_KHR_EXTERNAL_FENCE_CAPABILITIES_EXTENSION_NAME "VK_KHR_external_fence_capabilities" - - -typedef enum VkExternalFenceHandleTypeFlagBitsKHR { - VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT_KHR = 0x00000001, - VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR = 0x00000002, - VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_KHR = 0x00000004, - VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT_KHR = 0x00000008, - VK_EXTERNAL_FENCE_HANDLE_TYPE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF -} VkExternalFenceHandleTypeFlagBitsKHR; -typedef VkFlags VkExternalFenceHandleTypeFlagsKHR; - -typedef enum VkExternalFenceFeatureFlagBitsKHR { - VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT_KHR = 0x00000001, - VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT_KHR = 0x00000002, - VK_EXTERNAL_FENCE_FEATURE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF -} VkExternalFenceFeatureFlagBitsKHR; -typedef VkFlags VkExternalFenceFeatureFlagsKHR; - -typedef struct VkPhysicalDeviceExternalFenceInfoKHR { - VkStructureType sType; - const void* pNext; - VkExternalFenceHandleTypeFlagBitsKHR handleType; -} VkPhysicalDeviceExternalFenceInfoKHR; - 
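
The VK_KHR_descriptor_update_template entry points removed above replace a batch of vkUpdateDescriptorSets calls with a single templated copy out of user memory. The sketch below is illustrative only, not part of the header change: the names TemplateData and update_with_template are made up, error handling is omitted, and in real code the *KHR entry points would be loaded with vkGetDeviceProcAddr rather than relying on the static prototypes.

#include <stddef.h>
#include <vulkan/vulkan.h>

typedef struct {
    VkDescriptorBufferInfo ubo;   /* the data blob the template reads from */
} TemplateData;

static void update_with_template(VkDevice device,
                                 VkDescriptorSetLayout setLayout,
                                 VkDescriptorSet set,
                                 VkBuffer uniformBuffer)
{
    /* One entry: binding 0 is a single uniform buffer taken from TemplateData::ubo. */
    VkDescriptorUpdateTemplateEntryKHR entry = {
        .dstBinding      = 0,
        .dstArrayElement = 0,
        .descriptorCount = 1,
        .descriptorType  = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
        .offset          = offsetof(TemplateData, ubo),
        .stride          = sizeof(TemplateData),
    };

    VkDescriptorUpdateTemplateCreateInfoKHR createInfo = {
        .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO_KHR,
        .descriptorUpdateEntryCount = 1,
        .pDescriptorUpdateEntries   = &entry,
        .templateType        = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET_KHR,
        .descriptorSetLayout = setLayout,
    };

    VkDescriptorUpdateTemplateKHR tmpl;
    vkCreateDescriptorUpdateTemplateKHR(device, &createInfo, NULL, &tmpl);

    /* Fill the raw blob and apply it in one call instead of vkUpdateDescriptorSets. */
    TemplateData data = {
        .ubo = { .buffer = uniformBuffer, .offset = 0, .range = VK_WHOLE_SIZE },
    };
    vkUpdateDescriptorSetWithTemplateKHR(device, set, tmpl, &data);

    vkDestroyDescriptorUpdateTemplateKHR(device, tmpl, NULL);
}
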
-typedef struct VkExternalFencePropertiesKHR { - VkStructureType sType; - void* pNext; - VkExternalFenceHandleTypeFlagsKHR exportFromImportedHandleTypes; - VkExternalFenceHandleTypeFlagsKHR compatibleHandleTypes; - VkExternalFenceFeatureFlagsKHR externalFenceFeatures; -} VkExternalFencePropertiesKHR; - - -typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalFenceInfoKHR* pExternalFenceInfo, VkExternalFencePropertiesKHR* pExternalFenceProperties); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalFencePropertiesKHR( - VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceExternalFenceInfoKHR* pExternalFenceInfo, - VkExternalFencePropertiesKHR* pExternalFenceProperties); -#endif - -#define VK_KHR_external_fence 1 -#define VK_KHR_EXTERNAL_FENCE_SPEC_VERSION 1 -#define VK_KHR_EXTERNAL_FENCE_EXTENSION_NAME "VK_KHR_external_fence" - - -typedef enum VkFenceImportFlagBitsKHR { - VK_FENCE_IMPORT_TEMPORARY_BIT_KHR = 0x00000001, - VK_FENCE_IMPORT_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF -} VkFenceImportFlagBitsKHR; -typedef VkFlags VkFenceImportFlagsKHR; - -typedef struct VkExportFenceCreateInfoKHR { - VkStructureType sType; - const void* pNext; - VkExternalFenceHandleTypeFlagsKHR handleTypes; -} VkExportFenceCreateInfoKHR; - - - -#ifdef VK_USE_PLATFORM_WIN32_KHR -#define VK_KHR_external_fence_win32 1 -#define VK_KHR_EXTERNAL_FENCE_WIN32_SPEC_VERSION 1 -#define VK_KHR_EXTERNAL_FENCE_WIN32_EXTENSION_NAME "VK_KHR_external_fence_win32" - -typedef struct VkImportFenceWin32HandleInfoKHR { - VkStructureType sType; - const void* pNext; - VkFence fence; - VkFenceImportFlagsKHR flags; - VkExternalFenceHandleTypeFlagBitsKHR handleType; - HANDLE handle; - LPCWSTR name; -} VkImportFenceWin32HandleInfoKHR; - -typedef struct VkExportFenceWin32HandleInfoKHR { - VkStructureType sType; - const void* pNext; - const SECURITY_ATTRIBUTES* pAttributes; - DWORD dwAccess; - LPCWSTR name; -} VkExportFenceWin32HandleInfoKHR; - -typedef struct VkFenceGetWin32HandleInfoKHR { - VkStructureType sType; - const void* pNext; - VkFence fence; - VkExternalFenceHandleTypeFlagBitsKHR handleType; -} VkFenceGetWin32HandleInfoKHR; - - -typedef VkResult (VKAPI_PTR *PFN_vkImportFenceWin32HandleKHR)(VkDevice device, const VkImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo); -typedef VkResult (VKAPI_PTR *PFN_vkGetFenceWin32HandleKHR)(VkDevice device, const VkFenceGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkImportFenceWin32HandleKHR( - VkDevice device, - const VkImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo); - -VKAPI_ATTR VkResult VKAPI_CALL vkGetFenceWin32HandleKHR( - VkDevice device, - const VkFenceGetWin32HandleInfoKHR* pGetWin32HandleInfo, - HANDLE* pHandle); -#endif -#endif /* VK_USE_PLATFORM_WIN32_KHR */ - -#define VK_KHR_external_fence_fd 1 -#define VK_KHR_EXTERNAL_FENCE_FD_SPEC_VERSION 1 -#define VK_KHR_EXTERNAL_FENCE_FD_EXTENSION_NAME "VK_KHR_external_fence_fd" - -typedef struct VkImportFenceFdInfoKHR { - VkStructureType sType; - const void* pNext; - VkFence fence; - VkFenceImportFlagsKHR flags; - VkExternalFenceHandleTypeFlagBitsKHR handleType; - int fd; -} VkImportFenceFdInfoKHR; - -typedef struct VkFenceGetFdInfoKHR { - VkStructureType sType; - const void* pNext; - VkFence fence; - VkExternalFenceHandleTypeFlagBitsKHR handleType; -} VkFenceGetFdInfoKHR; - - -typedef VkResult (VKAPI_PTR 
*PFN_vkImportFenceFdKHR)(VkDevice device, const VkImportFenceFdInfoKHR* pImportFenceFdInfo); -typedef VkResult (VKAPI_PTR *PFN_vkGetFenceFdKHR)(VkDevice device, const VkFenceGetFdInfoKHR* pGetFdInfo, int* pFd); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkImportFenceFdKHR( - VkDevice device, - const VkImportFenceFdInfoKHR* pImportFenceFdInfo); - -VKAPI_ATTR VkResult VKAPI_CALL vkGetFenceFdKHR( - VkDevice device, - const VkFenceGetFdInfoKHR* pGetFdInfo, - int* pFd); -#endif - -#define VK_KHR_get_surface_capabilities2 1 -#define VK_KHR_GET_SURFACE_CAPABILITIES_2_SPEC_VERSION 1 -#define VK_KHR_GET_SURFACE_CAPABILITIES_2_EXTENSION_NAME "VK_KHR_get_surface_capabilities2" - -typedef struct VkPhysicalDeviceSurfaceInfo2KHR { - VkStructureType sType; - const void* pNext; - VkSurfaceKHR surface; -} VkPhysicalDeviceSurfaceInfo2KHR; - -typedef struct VkSurfaceCapabilities2KHR { - VkStructureType sType; - void* pNext; - VkSurfaceCapabilitiesKHR surfaceCapabilities; -} VkSurfaceCapabilities2KHR; - -typedef struct VkSurfaceFormat2KHR { - VkStructureType sType; - void* pNext; - VkSurfaceFormatKHR surfaceFormat; -} VkSurfaceFormat2KHR; - - -typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VkSurfaceCapabilities2KHR* pSurfaceCapabilities); -typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceFormats2KHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pSurfaceFormatCount, VkSurfaceFormat2KHR* pSurfaceFormats); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceCapabilities2KHR( - VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, - VkSurfaceCapabilities2KHR* pSurfaceCapabilities); - -VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceFormats2KHR( - VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, - uint32_t* pSurfaceFormatCount, - VkSurfaceFormat2KHR* pSurfaceFormats); -#endif - -#define VK_KHR_variable_pointers 1 -#define VK_KHR_VARIABLE_POINTERS_SPEC_VERSION 1 -#define VK_KHR_VARIABLE_POINTERS_EXTENSION_NAME "VK_KHR_variable_pointers" - -typedef struct VkPhysicalDeviceVariablePointerFeaturesKHR { - VkStructureType sType; - void* pNext; - VkBool32 variablePointersStorageBuffer; - VkBool32 variablePointers; -} VkPhysicalDeviceVariablePointerFeaturesKHR; - - - -#define VK_KHR_dedicated_allocation 1 -#define VK_KHR_DEDICATED_ALLOCATION_SPEC_VERSION 3 -#define VK_KHR_DEDICATED_ALLOCATION_EXTENSION_NAME "VK_KHR_dedicated_allocation" - -typedef struct VkMemoryDedicatedRequirementsKHR { - VkStructureType sType; - void* pNext; - VkBool32 prefersDedicatedAllocation; - VkBool32 requiresDedicatedAllocation; -} VkMemoryDedicatedRequirementsKHR; - -typedef struct VkMemoryDedicatedAllocateInfoKHR { - VkStructureType sType; - const void* pNext; - VkImage image; - VkBuffer buffer; -} VkMemoryDedicatedAllocateInfoKHR; - - - -#define VK_KHR_storage_buffer_storage_class 1 -#define VK_KHR_STORAGE_BUFFER_STORAGE_CLASS_SPEC_VERSION 1 -#define VK_KHR_STORAGE_BUFFER_STORAGE_CLASS_EXTENSION_NAME "VK_KHR_storage_buffer_storage_class" - - -#define VK_KHR_relaxed_block_layout 1 -#define VK_KHR_RELAXED_BLOCK_LAYOUT_SPEC_VERSION 1 -#define VK_KHR_RELAXED_BLOCK_LAYOUT_EXTENSION_NAME "VK_KHR_relaxed_block_layout" - - -#define VK_KHR_get_memory_requirements2 1 -#define VK_KHR_GET_MEMORY_REQUIREMENTS_2_SPEC_VERSION 1 -#define 
VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME "VK_KHR_get_memory_requirements2" - -typedef struct VkBufferMemoryRequirementsInfo2KHR { - VkStructureType sType; - const void* pNext; - VkBuffer buffer; -} VkBufferMemoryRequirementsInfo2KHR; - -typedef struct VkImageMemoryRequirementsInfo2KHR { - VkStructureType sType; - const void* pNext; - VkImage image; -} VkImageMemoryRequirementsInfo2KHR; - -typedef struct VkImageSparseMemoryRequirementsInfo2KHR { - VkStructureType sType; - const void* pNext; - VkImage image; -} VkImageSparseMemoryRequirementsInfo2KHR; - -typedef struct VkMemoryRequirements2KHR { - VkStructureType sType; - void* pNext; - VkMemoryRequirements memoryRequirements; -} VkMemoryRequirements2KHR; - -typedef struct VkSparseImageMemoryRequirements2KHR { - VkStructureType sType; - void* pNext; - VkSparseImageMemoryRequirements memoryRequirements; -} VkSparseImageMemoryRequirements2KHR; - - -typedef void (VKAPI_PTR *PFN_vkGetImageMemoryRequirements2KHR)(VkDevice device, const VkImageMemoryRequirementsInfo2KHR* pInfo, VkMemoryRequirements2KHR* pMemoryRequirements); -typedef void (VKAPI_PTR *PFN_vkGetBufferMemoryRequirements2KHR)(VkDevice device, const VkBufferMemoryRequirementsInfo2KHR* pInfo, VkMemoryRequirements2KHR* pMemoryRequirements); -typedef void (VKAPI_PTR *PFN_vkGetImageSparseMemoryRequirements2KHR)(VkDevice device, const VkImageSparseMemoryRequirementsInfo2KHR* pInfo, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2KHR* pSparseMemoryRequirements); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkGetImageMemoryRequirements2KHR( - VkDevice device, - const VkImageMemoryRequirementsInfo2KHR* pInfo, - VkMemoryRequirements2KHR* pMemoryRequirements); - -VKAPI_ATTR void VKAPI_CALL vkGetBufferMemoryRequirements2KHR( - VkDevice device, - const VkBufferMemoryRequirementsInfo2KHR* pInfo, - VkMemoryRequirements2KHR* pMemoryRequirements); - -VKAPI_ATTR void VKAPI_CALL vkGetImageSparseMemoryRequirements2KHR( - VkDevice device, - const VkImageSparseMemoryRequirementsInfo2KHR* pInfo, - uint32_t* pSparseMemoryRequirementCount, - VkSparseImageMemoryRequirements2KHR* pSparseMemoryRequirements); -#endif - -#define VK_EXT_debug_report 1 -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDebugReportCallbackEXT) - -#define VK_EXT_DEBUG_REPORT_SPEC_VERSION 8 -#define VK_EXT_DEBUG_REPORT_EXTENSION_NAME "VK_EXT_debug_report" -#define VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT -#define VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT - - -typedef enum VkDebugReportObjectTypeEXT { - VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT = 0, - VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT = 1, - VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT = 2, - VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT = 3, - VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT = 4, - VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT = 5, - VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT = 6, - VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT = 7, - VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT = 8, - VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT = 9, - VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT = 10, - VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT = 11, - VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT = 12, - VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT = 13, - VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT = 14, - VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT = 15, - VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT = 16, - VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT = 17, - 
VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT = 18, - VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT = 19, - VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT = 20, - VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT = 21, - VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT = 22, - VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT = 23, - VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT = 24, - VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT = 25, - VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT = 26, - VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT = 27, - VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT = 28, - VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_KHR_EXT = 29, - VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_MODE_KHR_EXT = 30, - VK_DEBUG_REPORT_OBJECT_TYPE_OBJECT_TABLE_NVX_EXT = 31, - VK_DEBUG_REPORT_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX_EXT = 32, - VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR_EXT = 1000085000, - VK_DEBUG_REPORT_OBJECT_TYPE_BEGIN_RANGE_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, - VK_DEBUG_REPORT_OBJECT_TYPE_END_RANGE_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX_EXT, - VK_DEBUG_REPORT_OBJECT_TYPE_RANGE_SIZE_EXT = (VK_DEBUG_REPORT_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX_EXT - VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT + 1), - VK_DEBUG_REPORT_OBJECT_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF -} VkDebugReportObjectTypeEXT; - - -typedef enum VkDebugReportFlagBitsEXT { - VK_DEBUG_REPORT_INFORMATION_BIT_EXT = 0x00000001, - VK_DEBUG_REPORT_WARNING_BIT_EXT = 0x00000002, - VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT = 0x00000004, - VK_DEBUG_REPORT_ERROR_BIT_EXT = 0x00000008, - VK_DEBUG_REPORT_DEBUG_BIT_EXT = 0x00000010, - VK_DEBUG_REPORT_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF -} VkDebugReportFlagBitsEXT; -typedef VkFlags VkDebugReportFlagsEXT; - -typedef VkBool32 (VKAPI_PTR *PFN_vkDebugReportCallbackEXT)( - VkDebugReportFlagsEXT flags, - VkDebugReportObjectTypeEXT objectType, - uint64_t object, - size_t location, - int32_t messageCode, - const char* pLayerPrefix, - const char* pMessage, - void* pUserData); - -typedef struct VkDebugReportCallbackCreateInfoEXT { - VkStructureType sType; - const void* pNext; - VkDebugReportFlagsEXT flags; - PFN_vkDebugReportCallbackEXT pfnCallback; - void* pUserData; -} VkDebugReportCallbackCreateInfoEXT; - - -typedef VkResult (VKAPI_PTR *PFN_vkCreateDebugReportCallbackEXT)(VkInstance instance, const VkDebugReportCallbackCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDebugReportCallbackEXT* pCallback); -typedef void (VKAPI_PTR *PFN_vkDestroyDebugReportCallbackEXT)(VkInstance instance, VkDebugReportCallbackEXT callback, const VkAllocationCallbacks* pAllocator); -typedef void (VKAPI_PTR *PFN_vkDebugReportMessageEXT)(VkInstance instance, VkDebugReportFlagsEXT flags, VkDebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char* pLayerPrefix, const char* pMessage); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkCreateDebugReportCallbackEXT( - VkInstance instance, - const VkDebugReportCallbackCreateInfoEXT* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkDebugReportCallbackEXT* pCallback); - -VKAPI_ATTR void VKAPI_CALL vkDestroyDebugReportCallbackEXT( - VkInstance instance, - VkDebugReportCallbackEXT callback, - const VkAllocationCallbacks* pAllocator); - -VKAPI_ATTR void VKAPI_CALL vkDebugReportMessageEXT( - VkInstance instance, - VkDebugReportFlagsEXT flags, - VkDebugReportObjectTypeEXT objectType, - uint64_t object, - size_t location, - int32_t messageCode, - const char* 
pLayerPrefix, - const char* pMessage); -#endif - -#define VK_NV_glsl_shader 1 -#define VK_NV_GLSL_SHADER_SPEC_VERSION 1 -#define VK_NV_GLSL_SHADER_EXTENSION_NAME "VK_NV_glsl_shader" - - -#define VK_EXT_depth_range_unrestricted 1 -#define VK_EXT_DEPTH_RANGE_UNRESTRICTED_SPEC_VERSION 1 -#define VK_EXT_DEPTH_RANGE_UNRESTRICTED_EXTENSION_NAME "VK_EXT_depth_range_unrestricted" - - -#define VK_IMG_filter_cubic 1 -#define VK_IMG_FILTER_CUBIC_SPEC_VERSION 1 -#define VK_IMG_FILTER_CUBIC_EXTENSION_NAME "VK_IMG_filter_cubic" - - -#define VK_AMD_rasterization_order 1 -#define VK_AMD_RASTERIZATION_ORDER_SPEC_VERSION 1 -#define VK_AMD_RASTERIZATION_ORDER_EXTENSION_NAME "VK_AMD_rasterization_order" - - -typedef enum VkRasterizationOrderAMD { - VK_RASTERIZATION_ORDER_STRICT_AMD = 0, - VK_RASTERIZATION_ORDER_RELAXED_AMD = 1, - VK_RASTERIZATION_ORDER_BEGIN_RANGE_AMD = VK_RASTERIZATION_ORDER_STRICT_AMD, - VK_RASTERIZATION_ORDER_END_RANGE_AMD = VK_RASTERIZATION_ORDER_RELAXED_AMD, - VK_RASTERIZATION_ORDER_RANGE_SIZE_AMD = (VK_RASTERIZATION_ORDER_RELAXED_AMD - VK_RASTERIZATION_ORDER_STRICT_AMD + 1), - VK_RASTERIZATION_ORDER_MAX_ENUM_AMD = 0x7FFFFFFF -} VkRasterizationOrderAMD; - -typedef struct VkPipelineRasterizationStateRasterizationOrderAMD { - VkStructureType sType; - const void* pNext; - VkRasterizationOrderAMD rasterizationOrder; -} VkPipelineRasterizationStateRasterizationOrderAMD; - - - -#define VK_AMD_shader_trinary_minmax 1 -#define VK_AMD_SHADER_TRINARY_MINMAX_SPEC_VERSION 1 -#define VK_AMD_SHADER_TRINARY_MINMAX_EXTENSION_NAME "VK_AMD_shader_trinary_minmax" - - -#define VK_AMD_shader_explicit_vertex_parameter 1 -#define VK_AMD_SHADER_EXPLICIT_VERTEX_PARAMETER_SPEC_VERSION 1 -#define VK_AMD_SHADER_EXPLICIT_VERTEX_PARAMETER_EXTENSION_NAME "VK_AMD_shader_explicit_vertex_parameter" - - -#define VK_EXT_debug_marker 1 -#define VK_EXT_DEBUG_MARKER_SPEC_VERSION 4 -#define VK_EXT_DEBUG_MARKER_EXTENSION_NAME "VK_EXT_debug_marker" - -typedef struct VkDebugMarkerObjectNameInfoEXT { - VkStructureType sType; - const void* pNext; - VkDebugReportObjectTypeEXT objectType; - uint64_t object; - const char* pObjectName; -} VkDebugMarkerObjectNameInfoEXT; - -typedef struct VkDebugMarkerObjectTagInfoEXT { - VkStructureType sType; - const void* pNext; - VkDebugReportObjectTypeEXT objectType; - uint64_t object; - uint64_t tagName; - size_t tagSize; - const void* pTag; -} VkDebugMarkerObjectTagInfoEXT; - -typedef struct VkDebugMarkerMarkerInfoEXT { - VkStructureType sType; - const void* pNext; - const char* pMarkerName; - float color[4]; -} VkDebugMarkerMarkerInfoEXT; - - -typedef VkResult (VKAPI_PTR *PFN_vkDebugMarkerSetObjectTagEXT)(VkDevice device, const VkDebugMarkerObjectTagInfoEXT* pTagInfo); -typedef VkResult (VKAPI_PTR *PFN_vkDebugMarkerSetObjectNameEXT)(VkDevice device, const VkDebugMarkerObjectNameInfoEXT* pNameInfo); -typedef void (VKAPI_PTR *PFN_vkCmdDebugMarkerBeginEXT)(VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT* pMarkerInfo); -typedef void (VKAPI_PTR *PFN_vkCmdDebugMarkerEndEXT)(VkCommandBuffer commandBuffer); -typedef void (VKAPI_PTR *PFN_vkCmdDebugMarkerInsertEXT)(VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT* pMarkerInfo); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkDebugMarkerSetObjectTagEXT( - VkDevice device, - const VkDebugMarkerObjectTagInfoEXT* pTagInfo); - -VKAPI_ATTR VkResult VKAPI_CALL vkDebugMarkerSetObjectNameEXT( - VkDevice device, - const VkDebugMarkerObjectNameInfoEXT* pNameInfo); - -VKAPI_ATTR void VKAPI_CALL vkCmdDebugMarkerBeginEXT( - 
VkCommandBuffer commandBuffer, - const VkDebugMarkerMarkerInfoEXT* pMarkerInfo); - -VKAPI_ATTR void VKAPI_CALL vkCmdDebugMarkerEndEXT( - VkCommandBuffer commandBuffer); - -VKAPI_ATTR void VKAPI_CALL vkCmdDebugMarkerInsertEXT( - VkCommandBuffer commandBuffer, - const VkDebugMarkerMarkerInfoEXT* pMarkerInfo); -#endif - -#define VK_AMD_gcn_shader 1 -#define VK_AMD_GCN_SHADER_SPEC_VERSION 1 -#define VK_AMD_GCN_SHADER_EXTENSION_NAME "VK_AMD_gcn_shader" - - -#define VK_NV_dedicated_allocation 1 -#define VK_NV_DEDICATED_ALLOCATION_SPEC_VERSION 1 -#define VK_NV_DEDICATED_ALLOCATION_EXTENSION_NAME "VK_NV_dedicated_allocation" - -typedef struct VkDedicatedAllocationImageCreateInfoNV { - VkStructureType sType; - const void* pNext; - VkBool32 dedicatedAllocation; -} VkDedicatedAllocationImageCreateInfoNV; - -typedef struct VkDedicatedAllocationBufferCreateInfoNV { - VkStructureType sType; - const void* pNext; - VkBool32 dedicatedAllocation; -} VkDedicatedAllocationBufferCreateInfoNV; - -typedef struct VkDedicatedAllocationMemoryAllocateInfoNV { - VkStructureType sType; - const void* pNext; - VkImage image; - VkBuffer buffer; -} VkDedicatedAllocationMemoryAllocateInfoNV; - - - -#define VK_AMD_draw_indirect_count 1 -#define VK_AMD_DRAW_INDIRECT_COUNT_SPEC_VERSION 1 -#define VK_AMD_DRAW_INDIRECT_COUNT_EXTENSION_NAME "VK_AMD_draw_indirect_count" - -typedef void (VKAPI_PTR *PFN_vkCmdDrawIndirectCountAMD)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride); -typedef void (VKAPI_PTR *PFN_vkCmdDrawIndexedIndirectCountAMD)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndirectCountAMD( - VkCommandBuffer commandBuffer, - VkBuffer buffer, - VkDeviceSize offset, - VkBuffer countBuffer, - VkDeviceSize countBufferOffset, - uint32_t maxDrawCount, - uint32_t stride); - -VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexedIndirectCountAMD( - VkCommandBuffer commandBuffer, - VkBuffer buffer, - VkDeviceSize offset, - VkBuffer countBuffer, - VkDeviceSize countBufferOffset, - uint32_t maxDrawCount, - uint32_t stride); -#endif - -#define VK_AMD_negative_viewport_height 1 -#define VK_AMD_NEGATIVE_VIEWPORT_HEIGHT_SPEC_VERSION 1 -#define VK_AMD_NEGATIVE_VIEWPORT_HEIGHT_EXTENSION_NAME "VK_AMD_negative_viewport_height" - - -#define VK_AMD_gpu_shader_half_float 1 -#define VK_AMD_GPU_SHADER_HALF_FLOAT_SPEC_VERSION 1 -#define VK_AMD_GPU_SHADER_HALF_FLOAT_EXTENSION_NAME "VK_AMD_gpu_shader_half_float" - - -#define VK_AMD_shader_ballot 1 -#define VK_AMD_SHADER_BALLOT_SPEC_VERSION 1 -#define VK_AMD_SHADER_BALLOT_EXTENSION_NAME "VK_AMD_shader_ballot" - - -#define VK_AMD_texture_gather_bias_lod 1 -#define VK_AMD_TEXTURE_GATHER_BIAS_LOD_SPEC_VERSION 1 -#define VK_AMD_TEXTURE_GATHER_BIAS_LOD_EXTENSION_NAME "VK_AMD_texture_gather_bias_lod" - -typedef struct VkTextureLODGatherFormatPropertiesAMD { - VkStructureType sType; - void* pNext; - VkBool32 supportsTextureGatherLODBiasAMD; -} VkTextureLODGatherFormatPropertiesAMD; - - - -#define VK_KHX_multiview 1 -#define VK_KHX_MULTIVIEW_SPEC_VERSION 1 -#define VK_KHX_MULTIVIEW_EXTENSION_NAME "VK_KHX_multiview" - -typedef struct VkRenderPassMultiviewCreateInfoKHX { - VkStructureType sType; - const void* pNext; - uint32_t subpassCount; - const uint32_t* pViewMasks; - uint32_t dependencyCount; - const int32_t* 
pViewOffsets; - uint32_t correlationMaskCount; - const uint32_t* pCorrelationMasks; -} VkRenderPassMultiviewCreateInfoKHX; - -typedef struct VkPhysicalDeviceMultiviewFeaturesKHX { - VkStructureType sType; - void* pNext; - VkBool32 multiview; - VkBool32 multiviewGeometryShader; - VkBool32 multiviewTessellationShader; -} VkPhysicalDeviceMultiviewFeaturesKHX; - -typedef struct VkPhysicalDeviceMultiviewPropertiesKHX { - VkStructureType sType; - void* pNext; - uint32_t maxMultiviewViewCount; - uint32_t maxMultiviewInstanceIndex; -} VkPhysicalDeviceMultiviewPropertiesKHX; - - - -#define VK_IMG_format_pvrtc 1 -#define VK_IMG_FORMAT_PVRTC_SPEC_VERSION 1 -#define VK_IMG_FORMAT_PVRTC_EXTENSION_NAME "VK_IMG_format_pvrtc" - - -#define VK_NV_external_memory_capabilities 1 -#define VK_NV_EXTERNAL_MEMORY_CAPABILITIES_SPEC_VERSION 1 -#define VK_NV_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME "VK_NV_external_memory_capabilities" - - -typedef enum VkExternalMemoryHandleTypeFlagBitsNV { - VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_NV = 0x00000001, - VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_NV = 0x00000002, - VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_BIT_NV = 0x00000004, - VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_KMT_BIT_NV = 0x00000008, - VK_EXTERNAL_MEMORY_HANDLE_TYPE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF -} VkExternalMemoryHandleTypeFlagBitsNV; -typedef VkFlags VkExternalMemoryHandleTypeFlagsNV; - -typedef enum VkExternalMemoryFeatureFlagBitsNV { - VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT_NV = 0x00000001, - VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_NV = 0x00000002, - VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_NV = 0x00000004, - VK_EXTERNAL_MEMORY_FEATURE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF -} VkExternalMemoryFeatureFlagBitsNV; -typedef VkFlags VkExternalMemoryFeatureFlagsNV; - -typedef struct VkExternalImageFormatPropertiesNV { - VkImageFormatProperties imageFormatProperties; - VkExternalMemoryFeatureFlagsNV externalMemoryFeatures; - VkExternalMemoryHandleTypeFlagsNV exportFromImportedHandleTypes; - VkExternalMemoryHandleTypeFlagsNV compatibleHandleTypes; -} VkExternalImageFormatPropertiesNV; - - -typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV)(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkExternalMemoryHandleTypeFlagsNV externalHandleType, VkExternalImageFormatPropertiesNV* pExternalImageFormatProperties); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceExternalImageFormatPropertiesNV( - VkPhysicalDevice physicalDevice, - VkFormat format, - VkImageType type, - VkImageTiling tiling, - VkImageUsageFlags usage, - VkImageCreateFlags flags, - VkExternalMemoryHandleTypeFlagsNV externalHandleType, - VkExternalImageFormatPropertiesNV* pExternalImageFormatProperties); -#endif - -#define VK_NV_external_memory 1 -#define VK_NV_EXTERNAL_MEMORY_SPEC_VERSION 1 -#define VK_NV_EXTERNAL_MEMORY_EXTENSION_NAME "VK_NV_external_memory" - -typedef struct VkExternalMemoryImageCreateInfoNV { - VkStructureType sType; - const void* pNext; - VkExternalMemoryHandleTypeFlagsNV handleTypes; -} VkExternalMemoryImageCreateInfoNV; - -typedef struct VkExportMemoryAllocateInfoNV { - VkStructureType sType; - const void* pNext; - VkExternalMemoryHandleTypeFlagsNV handleTypes; -} VkExportMemoryAllocateInfoNV; - - - -#ifdef VK_USE_PLATFORM_WIN32_KHR -#define VK_NV_external_memory_win32 1 -#define VK_NV_EXTERNAL_MEMORY_WIN32_SPEC_VERSION 1 -#define 
VK_NV_EXTERNAL_MEMORY_WIN32_EXTENSION_NAME "VK_NV_external_memory_win32" - -typedef struct VkImportMemoryWin32HandleInfoNV { - VkStructureType sType; - const void* pNext; - VkExternalMemoryHandleTypeFlagsNV handleType; - HANDLE handle; -} VkImportMemoryWin32HandleInfoNV; - -typedef struct VkExportMemoryWin32HandleInfoNV { - VkStructureType sType; - const void* pNext; - const SECURITY_ATTRIBUTES* pAttributes; - DWORD dwAccess; -} VkExportMemoryWin32HandleInfoNV; - - -typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryWin32HandleNV)(VkDevice device, VkDeviceMemory memory, VkExternalMemoryHandleTypeFlagsNV handleType, HANDLE* pHandle); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryWin32HandleNV( - VkDevice device, - VkDeviceMemory memory, - VkExternalMemoryHandleTypeFlagsNV handleType, - HANDLE* pHandle); -#endif -#endif /* VK_USE_PLATFORM_WIN32_KHR */ - -#ifdef VK_USE_PLATFORM_WIN32_KHR -#define VK_NV_win32_keyed_mutex 1 -#define VK_NV_WIN32_KEYED_MUTEX_SPEC_VERSION 1 -#define VK_NV_WIN32_KEYED_MUTEX_EXTENSION_NAME "VK_NV_win32_keyed_mutex" - -typedef struct VkWin32KeyedMutexAcquireReleaseInfoNV { - VkStructureType sType; - const void* pNext; - uint32_t acquireCount; - const VkDeviceMemory* pAcquireSyncs; - const uint64_t* pAcquireKeys; - const uint32_t* pAcquireTimeoutMilliseconds; - uint32_t releaseCount; - const VkDeviceMemory* pReleaseSyncs; - const uint64_t* pReleaseKeys; -} VkWin32KeyedMutexAcquireReleaseInfoNV; - - -#endif /* VK_USE_PLATFORM_WIN32_KHR */ - -#define VK_KHX_device_group 1 -#define VK_MAX_DEVICE_GROUP_SIZE_KHX 32 -#define VK_KHX_DEVICE_GROUP_SPEC_VERSION 1 -#define VK_KHX_DEVICE_GROUP_EXTENSION_NAME "VK_KHX_device_group" - - -typedef enum VkPeerMemoryFeatureFlagBitsKHX { - VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT_KHX = 0x00000001, - VK_PEER_MEMORY_FEATURE_COPY_DST_BIT_KHX = 0x00000002, - VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT_KHX = 0x00000004, - VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT_KHX = 0x00000008, - VK_PEER_MEMORY_FEATURE_FLAG_BITS_MAX_ENUM_KHX = 0x7FFFFFFF -} VkPeerMemoryFeatureFlagBitsKHX; -typedef VkFlags VkPeerMemoryFeatureFlagsKHX; - -typedef enum VkMemoryAllocateFlagBitsKHX { - VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT_KHX = 0x00000001, - VK_MEMORY_ALLOCATE_FLAG_BITS_MAX_ENUM_KHX = 0x7FFFFFFF -} VkMemoryAllocateFlagBitsKHX; -typedef VkFlags VkMemoryAllocateFlagsKHX; - -typedef enum VkDeviceGroupPresentModeFlagBitsKHX { - VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_BIT_KHX = 0x00000001, - VK_DEVICE_GROUP_PRESENT_MODE_REMOTE_BIT_KHX = 0x00000002, - VK_DEVICE_GROUP_PRESENT_MODE_SUM_BIT_KHX = 0x00000004, - VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_MULTI_DEVICE_BIT_KHX = 0x00000008, - VK_DEVICE_GROUP_PRESENT_MODE_FLAG_BITS_MAX_ENUM_KHX = 0x7FFFFFFF -} VkDeviceGroupPresentModeFlagBitsKHX; -typedef VkFlags VkDeviceGroupPresentModeFlagsKHX; - -typedef struct VkMemoryAllocateFlagsInfoKHX { - VkStructureType sType; - const void* pNext; - VkMemoryAllocateFlagsKHX flags; - uint32_t deviceMask; -} VkMemoryAllocateFlagsInfoKHX; - -typedef struct VkBindBufferMemoryInfoKHX { - VkStructureType sType; - const void* pNext; - VkBuffer buffer; - VkDeviceMemory memory; - VkDeviceSize memoryOffset; - uint32_t deviceIndexCount; - const uint32_t* pDeviceIndices; -} VkBindBufferMemoryInfoKHX; - -typedef struct VkBindImageMemoryInfoKHX { - VkStructureType sType; - const void* pNext; - VkImage image; - VkDeviceMemory memory; - VkDeviceSize memoryOffset; - uint32_t deviceIndexCount; - const uint32_t* pDeviceIndices; - uint32_t SFRRectCount; - const VkRect2D* pSFRRects; -} VkBindImageMemoryInfoKHX; - 
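
The provisional VK_KHX_device_group structures above are easiest to read with a concrete call sequence in mind. The following sketch is an assumption-laden illustration rather than part of the header: it presumes a logical device created from a two-GPU group, the usual deterministic VK_STRUCTURE_TYPE_*_KHX tokens for these structs, and the vkBindBufferMemory2KHX entry point whose declaration follows just below; the helper name bind_across_device_group is invented and error handling is omitted.

#include <vulkan/vulkan.h>

static void bind_across_device_group(VkDevice device, VkBuffer buffer,
                                     uint32_t memoryTypeIndex, VkDeviceSize size)
{
    /* Allocate an instance of the memory on devices 0 and 1 of the group. */
    VkMemoryAllocateFlagsInfoKHX flagsInfo = {
        .sType      = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHX,
        .flags      = VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT_KHX,
        .deviceMask = 0x3,                    /* one bit per physical device */
    };
    VkMemoryAllocateInfo allocInfo = {
        .sType           = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
        .pNext           = &flagsInfo,        /* extension struct chained via pNext */
        .allocationSize  = size,
        .memoryTypeIndex = memoryTypeIndex,
    };
    VkDeviceMemory memory;
    vkAllocateMemory(device, &allocInfo, NULL, &memory);

    /* pDeviceIndices[i] names the memory instance that device i binds to;
     * here each device binds its own local instance. */
    uint32_t deviceIndices[2] = { 0, 1 };
    VkBindBufferMemoryInfoKHX bindInfo = {
        .sType            = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO_KHX,
        .buffer           = buffer,
        .memory           = memory,
        .memoryOffset     = 0,
        .deviceIndexCount = 2,
        .pDeviceIndices   = deviceIndices,
    };
    vkBindBufferMemory2KHX(device, 1, &bindInfo);
}
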
-typedef struct VkDeviceGroupRenderPassBeginInfoKHX { - VkStructureType sType; - const void* pNext; - uint32_t deviceMask; - uint32_t deviceRenderAreaCount; - const VkRect2D* pDeviceRenderAreas; -} VkDeviceGroupRenderPassBeginInfoKHX; - -typedef struct VkDeviceGroupCommandBufferBeginInfoKHX { - VkStructureType sType; - const void* pNext; - uint32_t deviceMask; -} VkDeviceGroupCommandBufferBeginInfoKHX; - -typedef struct VkDeviceGroupSubmitInfoKHX { - VkStructureType sType; - const void* pNext; - uint32_t waitSemaphoreCount; - const uint32_t* pWaitSemaphoreDeviceIndices; - uint32_t commandBufferCount; - const uint32_t* pCommandBufferDeviceMasks; - uint32_t signalSemaphoreCount; - const uint32_t* pSignalSemaphoreDeviceIndices; -} VkDeviceGroupSubmitInfoKHX; - -typedef struct VkDeviceGroupBindSparseInfoKHX { - VkStructureType sType; - const void* pNext; - uint32_t resourceDeviceIndex; - uint32_t memoryDeviceIndex; -} VkDeviceGroupBindSparseInfoKHX; - -typedef struct VkDeviceGroupPresentCapabilitiesKHX { - VkStructureType sType; - const void* pNext; - uint32_t presentMask[VK_MAX_DEVICE_GROUP_SIZE_KHX]; - VkDeviceGroupPresentModeFlagsKHX modes; -} VkDeviceGroupPresentCapabilitiesKHX; - -typedef struct VkImageSwapchainCreateInfoKHX { - VkStructureType sType; - const void* pNext; - VkSwapchainKHR swapchain; -} VkImageSwapchainCreateInfoKHX; - -typedef struct VkBindImageMemorySwapchainInfoKHX { - VkStructureType sType; - const void* pNext; - VkSwapchainKHR swapchain; - uint32_t imageIndex; -} VkBindImageMemorySwapchainInfoKHX; - -typedef struct VkAcquireNextImageInfoKHX { - VkStructureType sType; - const void* pNext; - VkSwapchainKHR swapchain; - uint64_t timeout; - VkSemaphore semaphore; - VkFence fence; - uint32_t deviceMask; -} VkAcquireNextImageInfoKHX; - -typedef struct VkDeviceGroupPresentInfoKHX { - VkStructureType sType; - const void* pNext; - uint32_t swapchainCount; - const uint32_t* pDeviceMasks; - VkDeviceGroupPresentModeFlagBitsKHX mode; -} VkDeviceGroupPresentInfoKHX; - -typedef struct VkDeviceGroupSwapchainCreateInfoKHX { - VkStructureType sType; - const void* pNext; - VkDeviceGroupPresentModeFlagsKHX modes; -} VkDeviceGroupSwapchainCreateInfoKHX; - - -typedef void (VKAPI_PTR *PFN_vkGetDeviceGroupPeerMemoryFeaturesKHX)(VkDevice device, uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VkPeerMemoryFeatureFlagsKHX* pPeerMemoryFeatures); -typedef VkResult (VKAPI_PTR *PFN_vkBindBufferMemory2KHX)(VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfoKHX* pBindInfos); -typedef VkResult (VKAPI_PTR *PFN_vkBindImageMemory2KHX)(VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfoKHX* pBindInfos); -typedef void (VKAPI_PTR *PFN_vkCmdSetDeviceMaskKHX)(VkCommandBuffer commandBuffer, uint32_t deviceMask); -typedef VkResult (VKAPI_PTR *PFN_vkGetDeviceGroupPresentCapabilitiesKHX)(VkDevice device, VkDeviceGroupPresentCapabilitiesKHX* pDeviceGroupPresentCapabilities); -typedef VkResult (VKAPI_PTR *PFN_vkGetDeviceGroupSurfacePresentModesKHX)(VkDevice device, VkSurfaceKHR surface, VkDeviceGroupPresentModeFlagsKHX* pModes); -typedef VkResult (VKAPI_PTR *PFN_vkAcquireNextImage2KHX)(VkDevice device, const VkAcquireNextImageInfoKHX* pAcquireInfo, uint32_t* pImageIndex); -typedef void (VKAPI_PTR *PFN_vkCmdDispatchBaseKHX)(VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ); -typedef VkResult (VKAPI_PTR 
*PFN_vkGetPhysicalDevicePresentRectanglesKHX)(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pRectCount, VkRect2D* pRects); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkGetDeviceGroupPeerMemoryFeaturesKHX( - VkDevice device, - uint32_t heapIndex, - uint32_t localDeviceIndex, - uint32_t remoteDeviceIndex, - VkPeerMemoryFeatureFlagsKHX* pPeerMemoryFeatures); - -VKAPI_ATTR VkResult VKAPI_CALL vkBindBufferMemory2KHX( - VkDevice device, - uint32_t bindInfoCount, - const VkBindBufferMemoryInfoKHX* pBindInfos); - -VKAPI_ATTR VkResult VKAPI_CALL vkBindImageMemory2KHX( - VkDevice device, - uint32_t bindInfoCount, - const VkBindImageMemoryInfoKHX* pBindInfos); - -VKAPI_ATTR void VKAPI_CALL vkCmdSetDeviceMaskKHX( - VkCommandBuffer commandBuffer, - uint32_t deviceMask); - -VKAPI_ATTR VkResult VKAPI_CALL vkGetDeviceGroupPresentCapabilitiesKHX( - VkDevice device, - VkDeviceGroupPresentCapabilitiesKHX* pDeviceGroupPresentCapabilities); - -VKAPI_ATTR VkResult VKAPI_CALL vkGetDeviceGroupSurfacePresentModesKHX( - VkDevice device, - VkSurfaceKHR surface, - VkDeviceGroupPresentModeFlagsKHX* pModes); - -VKAPI_ATTR VkResult VKAPI_CALL vkAcquireNextImage2KHX( - VkDevice device, - const VkAcquireNextImageInfoKHX* pAcquireInfo, - uint32_t* pImageIndex); - -VKAPI_ATTR void VKAPI_CALL vkCmdDispatchBaseKHX( - VkCommandBuffer commandBuffer, - uint32_t baseGroupX, - uint32_t baseGroupY, - uint32_t baseGroupZ, - uint32_t groupCountX, - uint32_t groupCountY, - uint32_t groupCountZ); - -VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDevicePresentRectanglesKHX( - VkPhysicalDevice physicalDevice, - VkSurfaceKHR surface, - uint32_t* pRectCount, - VkRect2D* pRects); -#endif - -#define VK_EXT_validation_flags 1 -#define VK_EXT_VALIDATION_FLAGS_SPEC_VERSION 1 -#define VK_EXT_VALIDATION_FLAGS_EXTENSION_NAME "VK_EXT_validation_flags" - - -typedef enum VkValidationCheckEXT { - VK_VALIDATION_CHECK_ALL_EXT = 0, - VK_VALIDATION_CHECK_SHADERS_EXT = 1, - VK_VALIDATION_CHECK_BEGIN_RANGE_EXT = VK_VALIDATION_CHECK_ALL_EXT, - VK_VALIDATION_CHECK_END_RANGE_EXT = VK_VALIDATION_CHECK_SHADERS_EXT, - VK_VALIDATION_CHECK_RANGE_SIZE_EXT = (VK_VALIDATION_CHECK_SHADERS_EXT - VK_VALIDATION_CHECK_ALL_EXT + 1), - VK_VALIDATION_CHECK_MAX_ENUM_EXT = 0x7FFFFFFF -} VkValidationCheckEXT; - -typedef struct VkValidationFlagsEXT { - VkStructureType sType; - const void* pNext; - uint32_t disabledValidationCheckCount; - VkValidationCheckEXT* pDisabledValidationChecks; -} VkValidationFlagsEXT; - - - -#ifdef VK_USE_PLATFORM_VI_NN -#define VK_NN_vi_surface 1 -#define VK_NN_VI_SURFACE_SPEC_VERSION 1 -#define VK_NN_VI_SURFACE_EXTENSION_NAME "VK_NN_vi_surface" - -typedef VkFlags VkViSurfaceCreateFlagsNN; - -typedef struct VkViSurfaceCreateInfoNN { - VkStructureType sType; - const void* pNext; - VkViSurfaceCreateFlagsNN flags; - void* window; -} VkViSurfaceCreateInfoNN; - - -typedef VkResult (VKAPI_PTR *PFN_vkCreateViSurfaceNN)(VkInstance instance, const VkViSurfaceCreateInfoNN* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkCreateViSurfaceNN( - VkInstance instance, - const VkViSurfaceCreateInfoNN* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkSurfaceKHR* pSurface); -#endif -#endif /* VK_USE_PLATFORM_VI_NN */ - -#define VK_EXT_shader_subgroup_ballot 1 -#define VK_EXT_SHADER_SUBGROUP_BALLOT_SPEC_VERSION 1 -#define VK_EXT_SHADER_SUBGROUP_BALLOT_EXTENSION_NAME "VK_EXT_shader_subgroup_ballot" - - -#define VK_EXT_shader_subgroup_vote 1 
-#define VK_EXT_SHADER_SUBGROUP_VOTE_SPEC_VERSION 1 -#define VK_EXT_SHADER_SUBGROUP_VOTE_EXTENSION_NAME "VK_EXT_shader_subgroup_vote" - - -#define VK_KHX_device_group_creation 1 -#define VK_KHX_DEVICE_GROUP_CREATION_SPEC_VERSION 1 -#define VK_KHX_DEVICE_GROUP_CREATION_EXTENSION_NAME "VK_KHX_device_group_creation" - -typedef struct VkPhysicalDeviceGroupPropertiesKHX { - VkStructureType sType; - void* pNext; - uint32_t physicalDeviceCount; - VkPhysicalDevice physicalDevices[VK_MAX_DEVICE_GROUP_SIZE_KHX]; - VkBool32 subsetAllocation; -} VkPhysicalDeviceGroupPropertiesKHX; - -typedef struct VkDeviceGroupDeviceCreateInfoKHX { - VkStructureType sType; - const void* pNext; - uint32_t physicalDeviceCount; - const VkPhysicalDevice* pPhysicalDevices; -} VkDeviceGroupDeviceCreateInfoKHX; - - -typedef VkResult (VKAPI_PTR *PFN_vkEnumeratePhysicalDeviceGroupsKHX)(VkInstance instance, uint32_t* pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupPropertiesKHX* pPhysicalDeviceGroupProperties); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkEnumeratePhysicalDeviceGroupsKHX( - VkInstance instance, - uint32_t* pPhysicalDeviceGroupCount, - VkPhysicalDeviceGroupPropertiesKHX* pPhysicalDeviceGroupProperties); -#endif - -#define VK_NVX_device_generated_commands 1 -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkObjectTableNVX) -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkIndirectCommandsLayoutNVX) - -#define VK_NVX_DEVICE_GENERATED_COMMANDS_SPEC_VERSION 3 -#define VK_NVX_DEVICE_GENERATED_COMMANDS_EXTENSION_NAME "VK_NVX_device_generated_commands" - - -typedef enum VkIndirectCommandsTokenTypeNVX { - VK_INDIRECT_COMMANDS_TOKEN_TYPE_PIPELINE_NVX = 0, - VK_INDIRECT_COMMANDS_TOKEN_TYPE_DESCRIPTOR_SET_NVX = 1, - VK_INDIRECT_COMMANDS_TOKEN_TYPE_INDEX_BUFFER_NVX = 2, - VK_INDIRECT_COMMANDS_TOKEN_TYPE_VERTEX_BUFFER_NVX = 3, - VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_CONSTANT_NVX = 4, - VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_NVX = 5, - VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_NVX = 6, - VK_INDIRECT_COMMANDS_TOKEN_TYPE_DISPATCH_NVX = 7, - VK_INDIRECT_COMMANDS_TOKEN_TYPE_BEGIN_RANGE_NVX = VK_INDIRECT_COMMANDS_TOKEN_TYPE_PIPELINE_NVX, - VK_INDIRECT_COMMANDS_TOKEN_TYPE_END_RANGE_NVX = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DISPATCH_NVX, - VK_INDIRECT_COMMANDS_TOKEN_TYPE_RANGE_SIZE_NVX = (VK_INDIRECT_COMMANDS_TOKEN_TYPE_DISPATCH_NVX - VK_INDIRECT_COMMANDS_TOKEN_TYPE_PIPELINE_NVX + 1), - VK_INDIRECT_COMMANDS_TOKEN_TYPE_MAX_ENUM_NVX = 0x7FFFFFFF -} VkIndirectCommandsTokenTypeNVX; - -typedef enum VkObjectEntryTypeNVX { - VK_OBJECT_ENTRY_TYPE_DESCRIPTOR_SET_NVX = 0, - VK_OBJECT_ENTRY_TYPE_PIPELINE_NVX = 1, - VK_OBJECT_ENTRY_TYPE_INDEX_BUFFER_NVX = 2, - VK_OBJECT_ENTRY_TYPE_VERTEX_BUFFER_NVX = 3, - VK_OBJECT_ENTRY_TYPE_PUSH_CONSTANT_NVX = 4, - VK_OBJECT_ENTRY_TYPE_BEGIN_RANGE_NVX = VK_OBJECT_ENTRY_TYPE_DESCRIPTOR_SET_NVX, - VK_OBJECT_ENTRY_TYPE_END_RANGE_NVX = VK_OBJECT_ENTRY_TYPE_PUSH_CONSTANT_NVX, - VK_OBJECT_ENTRY_TYPE_RANGE_SIZE_NVX = (VK_OBJECT_ENTRY_TYPE_PUSH_CONSTANT_NVX - VK_OBJECT_ENTRY_TYPE_DESCRIPTOR_SET_NVX + 1), - VK_OBJECT_ENTRY_TYPE_MAX_ENUM_NVX = 0x7FFFFFFF -} VkObjectEntryTypeNVX; - - -typedef enum VkIndirectCommandsLayoutUsageFlagBitsNVX { - VK_INDIRECT_COMMANDS_LAYOUT_USAGE_UNORDERED_SEQUENCES_BIT_NVX = 0x00000001, - VK_INDIRECT_COMMANDS_LAYOUT_USAGE_SPARSE_SEQUENCES_BIT_NVX = 0x00000002, - VK_INDIRECT_COMMANDS_LAYOUT_USAGE_EMPTY_EXECUTIONS_BIT_NVX = 0x00000004, - VK_INDIRECT_COMMANDS_LAYOUT_USAGE_INDEXED_SEQUENCES_BIT_NVX = 0x00000008, - VK_INDIRECT_COMMANDS_LAYOUT_USAGE_FLAG_BITS_MAX_ENUM_NVX = 0x7FFFFFFF -} 
VkIndirectCommandsLayoutUsageFlagBitsNVX; -typedef VkFlags VkIndirectCommandsLayoutUsageFlagsNVX; - -typedef enum VkObjectEntryUsageFlagBitsNVX { - VK_OBJECT_ENTRY_USAGE_GRAPHICS_BIT_NVX = 0x00000001, - VK_OBJECT_ENTRY_USAGE_COMPUTE_BIT_NVX = 0x00000002, - VK_OBJECT_ENTRY_USAGE_FLAG_BITS_MAX_ENUM_NVX = 0x7FFFFFFF -} VkObjectEntryUsageFlagBitsNVX; -typedef VkFlags VkObjectEntryUsageFlagsNVX; - -typedef struct VkDeviceGeneratedCommandsFeaturesNVX { - VkStructureType sType; - const void* pNext; - VkBool32 computeBindingPointSupport; -} VkDeviceGeneratedCommandsFeaturesNVX; - -typedef struct VkDeviceGeneratedCommandsLimitsNVX { - VkStructureType sType; - const void* pNext; - uint32_t maxIndirectCommandsLayoutTokenCount; - uint32_t maxObjectEntryCounts; - uint32_t minSequenceCountBufferOffsetAlignment; - uint32_t minSequenceIndexBufferOffsetAlignment; - uint32_t minCommandsTokenBufferOffsetAlignment; -} VkDeviceGeneratedCommandsLimitsNVX; - -typedef struct VkIndirectCommandsTokenNVX { - VkIndirectCommandsTokenTypeNVX tokenType; - VkBuffer buffer; - VkDeviceSize offset; -} VkIndirectCommandsTokenNVX; - -typedef struct VkIndirectCommandsLayoutTokenNVX { - VkIndirectCommandsTokenTypeNVX tokenType; - uint32_t bindingUnit; - uint32_t dynamicCount; - uint32_t divisor; -} VkIndirectCommandsLayoutTokenNVX; - -typedef struct VkIndirectCommandsLayoutCreateInfoNVX { - VkStructureType sType; - const void* pNext; - VkPipelineBindPoint pipelineBindPoint; - VkIndirectCommandsLayoutUsageFlagsNVX flags; - uint32_t tokenCount; - const VkIndirectCommandsLayoutTokenNVX* pTokens; -} VkIndirectCommandsLayoutCreateInfoNVX; - -typedef struct VkCmdProcessCommandsInfoNVX { - VkStructureType sType; - const void* pNext; - VkObjectTableNVX objectTable; - VkIndirectCommandsLayoutNVX indirectCommandsLayout; - uint32_t indirectCommandsTokenCount; - const VkIndirectCommandsTokenNVX* pIndirectCommandsTokens; - uint32_t maxSequencesCount; - VkCommandBuffer targetCommandBuffer; - VkBuffer sequencesCountBuffer; - VkDeviceSize sequencesCountOffset; - VkBuffer sequencesIndexBuffer; - VkDeviceSize sequencesIndexOffset; -} VkCmdProcessCommandsInfoNVX; - -typedef struct VkCmdReserveSpaceForCommandsInfoNVX { - VkStructureType sType; - const void* pNext; - VkObjectTableNVX objectTable; - VkIndirectCommandsLayoutNVX indirectCommandsLayout; - uint32_t maxSequencesCount; -} VkCmdReserveSpaceForCommandsInfoNVX; - -typedef struct VkObjectTableCreateInfoNVX { - VkStructureType sType; - const void* pNext; - uint32_t objectCount; - const VkObjectEntryTypeNVX* pObjectEntryTypes; - const uint32_t* pObjectEntryCounts; - const VkObjectEntryUsageFlagsNVX* pObjectEntryUsageFlags; - uint32_t maxUniformBuffersPerDescriptor; - uint32_t maxStorageBuffersPerDescriptor; - uint32_t maxStorageImagesPerDescriptor; - uint32_t maxSampledImagesPerDescriptor; - uint32_t maxPipelineLayouts; -} VkObjectTableCreateInfoNVX; - -typedef struct VkObjectTableEntryNVX { - VkObjectEntryTypeNVX type; - VkObjectEntryUsageFlagsNVX flags; -} VkObjectTableEntryNVX; - -typedef struct VkObjectTablePipelineEntryNVX { - VkObjectEntryTypeNVX type; - VkObjectEntryUsageFlagsNVX flags; - VkPipeline pipeline; -} VkObjectTablePipelineEntryNVX; - -typedef struct VkObjectTableDescriptorSetEntryNVX { - VkObjectEntryTypeNVX type; - VkObjectEntryUsageFlagsNVX flags; - VkPipelineLayout pipelineLayout; - VkDescriptorSet descriptorSet; -} VkObjectTableDescriptorSetEntryNVX; - -typedef struct VkObjectTableVertexBufferEntryNVX { - VkObjectEntryTypeNVX type; - VkObjectEntryUsageFlagsNVX flags; - 
VkBuffer buffer; -} VkObjectTableVertexBufferEntryNVX; - -typedef struct VkObjectTableIndexBufferEntryNVX { - VkObjectEntryTypeNVX type; - VkObjectEntryUsageFlagsNVX flags; - VkBuffer buffer; - VkIndexType indexType; -} VkObjectTableIndexBufferEntryNVX; - -typedef struct VkObjectTablePushConstantEntryNVX { - VkObjectEntryTypeNVX type; - VkObjectEntryUsageFlagsNVX flags; - VkPipelineLayout pipelineLayout; - VkShaderStageFlags stageFlags; -} VkObjectTablePushConstantEntryNVX; - - -typedef void (VKAPI_PTR *PFN_vkCmdProcessCommandsNVX)(VkCommandBuffer commandBuffer, const VkCmdProcessCommandsInfoNVX* pProcessCommandsInfo); -typedef void (VKAPI_PTR *PFN_vkCmdReserveSpaceForCommandsNVX)(VkCommandBuffer commandBuffer, const VkCmdReserveSpaceForCommandsInfoNVX* pReserveSpaceInfo); -typedef VkResult (VKAPI_PTR *PFN_vkCreateIndirectCommandsLayoutNVX)(VkDevice device, const VkIndirectCommandsLayoutCreateInfoNVX* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkIndirectCommandsLayoutNVX* pIndirectCommandsLayout); -typedef void (VKAPI_PTR *PFN_vkDestroyIndirectCommandsLayoutNVX)(VkDevice device, VkIndirectCommandsLayoutNVX indirectCommandsLayout, const VkAllocationCallbacks* pAllocator); -typedef VkResult (VKAPI_PTR *PFN_vkCreateObjectTableNVX)(VkDevice device, const VkObjectTableCreateInfoNVX* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkObjectTableNVX* pObjectTable); -typedef void (VKAPI_PTR *PFN_vkDestroyObjectTableNVX)(VkDevice device, VkObjectTableNVX objectTable, const VkAllocationCallbacks* pAllocator); -typedef VkResult (VKAPI_PTR *PFN_vkRegisterObjectsNVX)(VkDevice device, VkObjectTableNVX objectTable, uint32_t objectCount, const VkObjectTableEntryNVX* const* ppObjectTableEntries, const uint32_t* pObjectIndices); -typedef VkResult (VKAPI_PTR *PFN_vkUnregisterObjectsNVX)(VkDevice device, VkObjectTableNVX objectTable, uint32_t objectCount, const VkObjectEntryTypeNVX* pObjectEntryTypes, const uint32_t* pObjectIndices); -typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX)(VkPhysicalDevice physicalDevice, VkDeviceGeneratedCommandsFeaturesNVX* pFeatures, VkDeviceGeneratedCommandsLimitsNVX* pLimits); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkCmdProcessCommandsNVX( - VkCommandBuffer commandBuffer, - const VkCmdProcessCommandsInfoNVX* pProcessCommandsInfo); - -VKAPI_ATTR void VKAPI_CALL vkCmdReserveSpaceForCommandsNVX( - VkCommandBuffer commandBuffer, - const VkCmdReserveSpaceForCommandsInfoNVX* pReserveSpaceInfo); - -VKAPI_ATTR VkResult VKAPI_CALL vkCreateIndirectCommandsLayoutNVX( - VkDevice device, - const VkIndirectCommandsLayoutCreateInfoNVX* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkIndirectCommandsLayoutNVX* pIndirectCommandsLayout); - -VKAPI_ATTR void VKAPI_CALL vkDestroyIndirectCommandsLayoutNVX( - VkDevice device, - VkIndirectCommandsLayoutNVX indirectCommandsLayout, - const VkAllocationCallbacks* pAllocator); - -VKAPI_ATTR VkResult VKAPI_CALL vkCreateObjectTableNVX( - VkDevice device, - const VkObjectTableCreateInfoNVX* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkObjectTableNVX* pObjectTable); - -VKAPI_ATTR void VKAPI_CALL vkDestroyObjectTableNVX( - VkDevice device, - VkObjectTableNVX objectTable, - const VkAllocationCallbacks* pAllocator); - -VKAPI_ATTR VkResult VKAPI_CALL vkRegisterObjectsNVX( - VkDevice device, - VkObjectTableNVX objectTable, - uint32_t objectCount, - const VkObjectTableEntryNVX* const* ppObjectTableEntries, - const uint32_t* pObjectIndices); - -VKAPI_ATTR VkResult VKAPI_CALL 
vkUnregisterObjectsNVX( - VkDevice device, - VkObjectTableNVX objectTable, - uint32_t objectCount, - const VkObjectEntryTypeNVX* pObjectEntryTypes, - const uint32_t* pObjectIndices); - -VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX( - VkPhysicalDevice physicalDevice, - VkDeviceGeneratedCommandsFeaturesNVX* pFeatures, - VkDeviceGeneratedCommandsLimitsNVX* pLimits); -#endif - -#define VK_NV_clip_space_w_scaling 1 -#define VK_NV_CLIP_SPACE_W_SCALING_SPEC_VERSION 1 -#define VK_NV_CLIP_SPACE_W_SCALING_EXTENSION_NAME "VK_NV_clip_space_w_scaling" - -typedef struct VkViewportWScalingNV { - float xcoeff; - float ycoeff; -} VkViewportWScalingNV; - -typedef struct VkPipelineViewportWScalingStateCreateInfoNV { - VkStructureType sType; - const void* pNext; - VkBool32 viewportWScalingEnable; - uint32_t viewportCount; - const VkViewportWScalingNV* pViewportWScalings; -} VkPipelineViewportWScalingStateCreateInfoNV; - - -typedef void (VKAPI_PTR *PFN_vkCmdSetViewportWScalingNV)(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewportWScalingNV* pViewportWScalings); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkCmdSetViewportWScalingNV( - VkCommandBuffer commandBuffer, - uint32_t firstViewport, - uint32_t viewportCount, - const VkViewportWScalingNV* pViewportWScalings); -#endif - -#define VK_EXT_direct_mode_display 1 -#define VK_EXT_DIRECT_MODE_DISPLAY_SPEC_VERSION 1 -#define VK_EXT_DIRECT_MODE_DISPLAY_EXTENSION_NAME "VK_EXT_direct_mode_display" - -typedef VkResult (VKAPI_PTR *PFN_vkReleaseDisplayEXT)(VkPhysicalDevice physicalDevice, VkDisplayKHR display); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkReleaseDisplayEXT( - VkPhysicalDevice physicalDevice, - VkDisplayKHR display); -#endif - -#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT -#define VK_EXT_acquire_xlib_display 1 -#include - -#define VK_EXT_ACQUIRE_XLIB_DISPLAY_SPEC_VERSION 1 -#define VK_EXT_ACQUIRE_XLIB_DISPLAY_EXTENSION_NAME "VK_EXT_acquire_xlib_display" - -typedef VkResult (VKAPI_PTR *PFN_vkAcquireXlibDisplayEXT)(VkPhysicalDevice physicalDevice, Display* dpy, VkDisplayKHR display); -typedef VkResult (VKAPI_PTR *PFN_vkGetRandROutputDisplayEXT)(VkPhysicalDevice physicalDevice, Display* dpy, RROutput rrOutput, VkDisplayKHR* pDisplay); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkAcquireXlibDisplayEXT( - VkPhysicalDevice physicalDevice, - Display* dpy, - VkDisplayKHR display); - -VKAPI_ATTR VkResult VKAPI_CALL vkGetRandROutputDisplayEXT( - VkPhysicalDevice physicalDevice, - Display* dpy, - RROutput rrOutput, - VkDisplayKHR* pDisplay); -#endif -#endif /* VK_USE_PLATFORM_XLIB_XRANDR_EXT */ - -#define VK_EXT_display_surface_counter 1 -#define VK_EXT_DISPLAY_SURFACE_COUNTER_SPEC_VERSION 1 -#define VK_EXT_DISPLAY_SURFACE_COUNTER_EXTENSION_NAME "VK_EXT_display_surface_counter" -#define VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES2_EXT VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_EXT - - -typedef enum VkSurfaceCounterFlagBitsEXT { - VK_SURFACE_COUNTER_VBLANK_EXT = 0x00000001, - VK_SURFACE_COUNTER_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF -} VkSurfaceCounterFlagBitsEXT; -typedef VkFlags VkSurfaceCounterFlagsEXT; - -typedef struct VkSurfaceCapabilities2EXT { - VkStructureType sType; - void* pNext; - uint32_t minImageCount; - uint32_t maxImageCount; - VkExtent2D currentExtent; - VkExtent2D minImageExtent; - VkExtent2D maxImageExtent; - uint32_t maxImageArrayLayers; - VkSurfaceTransformFlagsKHR supportedTransforms; - VkSurfaceTransformFlagBitsKHR currentTransform; - 
VkCompositeAlphaFlagsKHR supportedCompositeAlpha; - VkImageUsageFlags supportedUsageFlags; - VkSurfaceCounterFlagsEXT supportedSurfaceCounters; -} VkSurfaceCapabilities2EXT; - - -typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT)(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilities2EXT* pSurfaceCapabilities); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceCapabilities2EXT( - VkPhysicalDevice physicalDevice, - VkSurfaceKHR surface, - VkSurfaceCapabilities2EXT* pSurfaceCapabilities); -#endif - -#define VK_EXT_display_control 1 -#define VK_EXT_DISPLAY_CONTROL_SPEC_VERSION 1 -#define VK_EXT_DISPLAY_CONTROL_EXTENSION_NAME "VK_EXT_display_control" - - -typedef enum VkDisplayPowerStateEXT { - VK_DISPLAY_POWER_STATE_OFF_EXT = 0, - VK_DISPLAY_POWER_STATE_SUSPEND_EXT = 1, - VK_DISPLAY_POWER_STATE_ON_EXT = 2, - VK_DISPLAY_POWER_STATE_BEGIN_RANGE_EXT = VK_DISPLAY_POWER_STATE_OFF_EXT, - VK_DISPLAY_POWER_STATE_END_RANGE_EXT = VK_DISPLAY_POWER_STATE_ON_EXT, - VK_DISPLAY_POWER_STATE_RANGE_SIZE_EXT = (VK_DISPLAY_POWER_STATE_ON_EXT - VK_DISPLAY_POWER_STATE_OFF_EXT + 1), - VK_DISPLAY_POWER_STATE_MAX_ENUM_EXT = 0x7FFFFFFF -} VkDisplayPowerStateEXT; - -typedef enum VkDeviceEventTypeEXT { - VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT = 0, - VK_DEVICE_EVENT_TYPE_BEGIN_RANGE_EXT = VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT, - VK_DEVICE_EVENT_TYPE_END_RANGE_EXT = VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT, - VK_DEVICE_EVENT_TYPE_RANGE_SIZE_EXT = (VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT - VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT + 1), - VK_DEVICE_EVENT_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF -} VkDeviceEventTypeEXT; - -typedef enum VkDisplayEventTypeEXT { - VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT = 0, - VK_DISPLAY_EVENT_TYPE_BEGIN_RANGE_EXT = VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT, - VK_DISPLAY_EVENT_TYPE_END_RANGE_EXT = VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT, - VK_DISPLAY_EVENT_TYPE_RANGE_SIZE_EXT = (VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT - VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT + 1), - VK_DISPLAY_EVENT_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF -} VkDisplayEventTypeEXT; - -typedef struct VkDisplayPowerInfoEXT { - VkStructureType sType; - const void* pNext; - VkDisplayPowerStateEXT powerState; -} VkDisplayPowerInfoEXT; - -typedef struct VkDeviceEventInfoEXT { - VkStructureType sType; - const void* pNext; - VkDeviceEventTypeEXT deviceEvent; -} VkDeviceEventInfoEXT; - -typedef struct VkDisplayEventInfoEXT { - VkStructureType sType; - const void* pNext; - VkDisplayEventTypeEXT displayEvent; -} VkDisplayEventInfoEXT; - -typedef struct VkSwapchainCounterCreateInfoEXT { - VkStructureType sType; - const void* pNext; - VkSurfaceCounterFlagsEXT surfaceCounters; -} VkSwapchainCounterCreateInfoEXT; - - -typedef VkResult (VKAPI_PTR *PFN_vkDisplayPowerControlEXT)(VkDevice device, VkDisplayKHR display, const VkDisplayPowerInfoEXT* pDisplayPowerInfo); -typedef VkResult (VKAPI_PTR *PFN_vkRegisterDeviceEventEXT)(VkDevice device, const VkDeviceEventInfoEXT* pDeviceEventInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence); -typedef VkResult (VKAPI_PTR *PFN_vkRegisterDisplayEventEXT)(VkDevice device, VkDisplayKHR display, const VkDisplayEventInfoEXT* pDisplayEventInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence); -typedef VkResult (VKAPI_PTR *PFN_vkGetSwapchainCounterEXT)(VkDevice device, VkSwapchainKHR swapchain, VkSurfaceCounterFlagBitsEXT counter, uint64_t* pCounterValue); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL 
vkDisplayPowerControlEXT( - VkDevice device, - VkDisplayKHR display, - const VkDisplayPowerInfoEXT* pDisplayPowerInfo); - -VKAPI_ATTR VkResult VKAPI_CALL vkRegisterDeviceEventEXT( - VkDevice device, - const VkDeviceEventInfoEXT* pDeviceEventInfo, - const VkAllocationCallbacks* pAllocator, - VkFence* pFence); - -VKAPI_ATTR VkResult VKAPI_CALL vkRegisterDisplayEventEXT( - VkDevice device, - VkDisplayKHR display, - const VkDisplayEventInfoEXT* pDisplayEventInfo, - const VkAllocationCallbacks* pAllocator, - VkFence* pFence); - -VKAPI_ATTR VkResult VKAPI_CALL vkGetSwapchainCounterEXT( - VkDevice device, - VkSwapchainKHR swapchain, - VkSurfaceCounterFlagBitsEXT counter, - uint64_t* pCounterValue); -#endif - -#define VK_GOOGLE_display_timing 1 -#define VK_GOOGLE_DISPLAY_TIMING_SPEC_VERSION 1 -#define VK_GOOGLE_DISPLAY_TIMING_EXTENSION_NAME "VK_GOOGLE_display_timing" - -typedef struct VkRefreshCycleDurationGOOGLE { - uint64_t refreshDuration; -} VkRefreshCycleDurationGOOGLE; - -typedef struct VkPastPresentationTimingGOOGLE { - uint32_t presentID; - uint64_t desiredPresentTime; - uint64_t actualPresentTime; - uint64_t earliestPresentTime; - uint64_t presentMargin; -} VkPastPresentationTimingGOOGLE; - -typedef struct VkPresentTimeGOOGLE { - uint32_t presentID; - uint64_t desiredPresentTime; -} VkPresentTimeGOOGLE; - -typedef struct VkPresentTimesInfoGOOGLE { - VkStructureType sType; - const void* pNext; - uint32_t swapchainCount; - const VkPresentTimeGOOGLE* pTimes; -} VkPresentTimesInfoGOOGLE; - - -typedef VkResult (VKAPI_PTR *PFN_vkGetRefreshCycleDurationGOOGLE)(VkDevice device, VkSwapchainKHR swapchain, VkRefreshCycleDurationGOOGLE* pDisplayTimingProperties); -typedef VkResult (VKAPI_PTR *PFN_vkGetPastPresentationTimingGOOGLE)(VkDevice device, VkSwapchainKHR swapchain, uint32_t* pPresentationTimingCount, VkPastPresentationTimingGOOGLE* pPresentationTimings); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkGetRefreshCycleDurationGOOGLE( - VkDevice device, - VkSwapchainKHR swapchain, - VkRefreshCycleDurationGOOGLE* pDisplayTimingProperties); - -VKAPI_ATTR VkResult VKAPI_CALL vkGetPastPresentationTimingGOOGLE( - VkDevice device, - VkSwapchainKHR swapchain, - uint32_t* pPresentationTimingCount, - VkPastPresentationTimingGOOGLE* pPresentationTimings); -#endif - -#define VK_NV_sample_mask_override_coverage 1 -#define VK_NV_SAMPLE_MASK_OVERRIDE_COVERAGE_SPEC_VERSION 1 -#define VK_NV_SAMPLE_MASK_OVERRIDE_COVERAGE_EXTENSION_NAME "VK_NV_sample_mask_override_coverage" - - -#define VK_NV_geometry_shader_passthrough 1 -#define VK_NV_GEOMETRY_SHADER_PASSTHROUGH_SPEC_VERSION 1 -#define VK_NV_GEOMETRY_SHADER_PASSTHROUGH_EXTENSION_NAME "VK_NV_geometry_shader_passthrough" - - -#define VK_NV_viewport_array2 1 -#define VK_NV_VIEWPORT_ARRAY2_SPEC_VERSION 1 -#define VK_NV_VIEWPORT_ARRAY2_EXTENSION_NAME "VK_NV_viewport_array2" - - -#define VK_NVX_multiview_per_view_attributes 1 -#define VK_NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_SPEC_VERSION 1 -#define VK_NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_EXTENSION_NAME "VK_NVX_multiview_per_view_attributes" - -typedef struct VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX { - VkStructureType sType; - void* pNext; - VkBool32 perViewPositionAllComponents; -} VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX; - - - -#define VK_NV_viewport_swizzle 1 -#define VK_NV_VIEWPORT_SWIZZLE_SPEC_VERSION 1 -#define VK_NV_VIEWPORT_SWIZZLE_EXTENSION_NAME "VK_NV_viewport_swizzle" - - -typedef enum VkViewportCoordinateSwizzleNV { - VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_X_NV = 0, 
- VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_X_NV = 1, - VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Y_NV = 2, - VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Y_NV = 3, - VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Z_NV = 4, - VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Z_NV = 5, - VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_W_NV = 6, - VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_W_NV = 7, - VK_VIEWPORT_COORDINATE_SWIZZLE_BEGIN_RANGE_NV = VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_X_NV, - VK_VIEWPORT_COORDINATE_SWIZZLE_END_RANGE_NV = VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_W_NV, - VK_VIEWPORT_COORDINATE_SWIZZLE_RANGE_SIZE_NV = (VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_W_NV - VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_X_NV + 1), - VK_VIEWPORT_COORDINATE_SWIZZLE_MAX_ENUM_NV = 0x7FFFFFFF -} VkViewportCoordinateSwizzleNV; - -typedef VkFlags VkPipelineViewportSwizzleStateCreateFlagsNV; - -typedef struct VkViewportSwizzleNV { - VkViewportCoordinateSwizzleNV x; - VkViewportCoordinateSwizzleNV y; - VkViewportCoordinateSwizzleNV z; - VkViewportCoordinateSwizzleNV w; -} VkViewportSwizzleNV; - -typedef struct VkPipelineViewportSwizzleStateCreateInfoNV { - VkStructureType sType; - const void* pNext; - VkPipelineViewportSwizzleStateCreateFlagsNV flags; - uint32_t viewportCount; - const VkViewportSwizzleNV* pViewportSwizzles; -} VkPipelineViewportSwizzleStateCreateInfoNV; - - - -#define VK_EXT_discard_rectangles 1 -#define VK_EXT_DISCARD_RECTANGLES_SPEC_VERSION 1 -#define VK_EXT_DISCARD_RECTANGLES_EXTENSION_NAME "VK_EXT_discard_rectangles" - - -typedef enum VkDiscardRectangleModeEXT { - VK_DISCARD_RECTANGLE_MODE_INCLUSIVE_EXT = 0, - VK_DISCARD_RECTANGLE_MODE_EXCLUSIVE_EXT = 1, - VK_DISCARD_RECTANGLE_MODE_BEGIN_RANGE_EXT = VK_DISCARD_RECTANGLE_MODE_INCLUSIVE_EXT, - VK_DISCARD_RECTANGLE_MODE_END_RANGE_EXT = VK_DISCARD_RECTANGLE_MODE_EXCLUSIVE_EXT, - VK_DISCARD_RECTANGLE_MODE_RANGE_SIZE_EXT = (VK_DISCARD_RECTANGLE_MODE_EXCLUSIVE_EXT - VK_DISCARD_RECTANGLE_MODE_INCLUSIVE_EXT + 1), - VK_DISCARD_RECTANGLE_MODE_MAX_ENUM_EXT = 0x7FFFFFFF -} VkDiscardRectangleModeEXT; - -typedef VkFlags VkPipelineDiscardRectangleStateCreateFlagsEXT; - -typedef struct VkPhysicalDeviceDiscardRectanglePropertiesEXT { - VkStructureType sType; - void* pNext; - uint32_t maxDiscardRectangles; -} VkPhysicalDeviceDiscardRectanglePropertiesEXT; - -typedef struct VkPipelineDiscardRectangleStateCreateInfoEXT { - VkStructureType sType; - const void* pNext; - VkPipelineDiscardRectangleStateCreateFlagsEXT flags; - VkDiscardRectangleModeEXT discardRectangleMode; - uint32_t discardRectangleCount; - const VkRect2D* pDiscardRectangles; -} VkPipelineDiscardRectangleStateCreateInfoEXT; - - -typedef void (VKAPI_PTR *PFN_vkCmdSetDiscardRectangleEXT)(VkCommandBuffer commandBuffer, uint32_t firstDiscardRectangle, uint32_t discardRectangleCount, const VkRect2D* pDiscardRectangles); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkCmdSetDiscardRectangleEXT( - VkCommandBuffer commandBuffer, - uint32_t firstDiscardRectangle, - uint32_t discardRectangleCount, - const VkRect2D* pDiscardRectangles); -#endif - -#define VK_EXT_swapchain_colorspace 1 -#define VK_EXT_SWAPCHAIN_COLOR_SPACE_SPEC_VERSION 3 -#define VK_EXT_SWAPCHAIN_COLOR_SPACE_EXTENSION_NAME "VK_EXT_swapchain_colorspace" - - -#define VK_EXT_hdr_metadata 1 -#define VK_EXT_HDR_METADATA_SPEC_VERSION 1 -#define VK_EXT_HDR_METADATA_EXTENSION_NAME "VK_EXT_hdr_metadata" - -typedef struct VkXYColorEXT { - float x; - float y; -} VkXYColorEXT; - -typedef struct VkHdrMetadataEXT { - VkStructureType sType; - const void* pNext; - 
VkXYColorEXT displayPrimaryRed; - VkXYColorEXT displayPrimaryGreen; - VkXYColorEXT displayPrimaryBlue; - VkXYColorEXT whitePoint; - float maxLuminance; - float minLuminance; - float maxContentLightLevel; - float maxFrameAverageLightLevel; -} VkHdrMetadataEXT; - - -typedef void (VKAPI_PTR *PFN_vkSetHdrMetadataEXT)(VkDevice device, uint32_t swapchainCount, const VkSwapchainKHR* pSwapchains, const VkHdrMetadataEXT* pMetadata); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkSetHdrMetadataEXT( - VkDevice device, - uint32_t swapchainCount, - const VkSwapchainKHR* pSwapchains, - const VkHdrMetadataEXT* pMetadata); +#ifdef VK_USE_PLATFORM_FUCHSIA +#include +#include "vulkan_fuchsia.h" #endif #ifdef VK_USE_PLATFORM_IOS_MVK -#define VK_MVK_ios_surface 1 -#define VK_MVK_IOS_SURFACE_SPEC_VERSION 2 -#define VK_MVK_IOS_SURFACE_EXTENSION_NAME "VK_MVK_ios_surface" - -typedef VkFlags VkIOSSurfaceCreateFlagsMVK; - -typedef struct VkIOSSurfaceCreateInfoMVK { - VkStructureType sType; - const void* pNext; - VkIOSSurfaceCreateFlagsMVK flags; - const void* pView; -} VkIOSSurfaceCreateInfoMVK; - - -typedef VkResult (VKAPI_PTR *PFN_vkCreateIOSSurfaceMVK)(VkInstance instance, const VkIOSSurfaceCreateInfoMVK* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkCreateIOSSurfaceMVK( - VkInstance instance, - const VkIOSSurfaceCreateInfoMVK* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkSurfaceKHR* pSurface); +#include "vulkan_ios.h" #endif -#endif /* VK_USE_PLATFORM_IOS_MVK */ + #ifdef VK_USE_PLATFORM_MACOS_MVK -#define VK_MVK_macos_surface 1 -#define VK_MVK_MACOS_SURFACE_SPEC_VERSION 2 -#define VK_MVK_MACOS_SURFACE_EXTENSION_NAME "VK_MVK_macos_surface" - -typedef VkFlags VkMacOSSurfaceCreateFlagsMVK; - -typedef struct VkMacOSSurfaceCreateInfoMVK { - VkStructureType sType; - const void* pNext; - VkMacOSSurfaceCreateFlagsMVK flags; - const void* pView; -} VkMacOSSurfaceCreateInfoMVK; - - -typedef VkResult (VKAPI_PTR *PFN_vkCreateMacOSSurfaceMVK)(VkInstance instance, const VkMacOSSurfaceCreateInfoMVK* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkCreateMacOSSurfaceMVK( - VkInstance instance, - const VkMacOSSurfaceCreateInfoMVK* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkSurfaceKHR* pSurface); -#endif -#endif /* VK_USE_PLATFORM_MACOS_MVK */ - -#define VK_EXT_sampler_filter_minmax 1 -#define VK_EXT_SAMPLER_FILTER_MINMAX_SPEC_VERSION 1 -#define VK_EXT_SAMPLER_FILTER_MINMAX_EXTENSION_NAME "VK_EXT_sampler_filter_minmax" - - -typedef enum VkSamplerReductionModeEXT { - VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE_EXT = 0, - VK_SAMPLER_REDUCTION_MODE_MIN_EXT = 1, - VK_SAMPLER_REDUCTION_MODE_MAX_EXT = 2, - VK_SAMPLER_REDUCTION_MODE_BEGIN_RANGE_EXT = VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE_EXT, - VK_SAMPLER_REDUCTION_MODE_END_RANGE_EXT = VK_SAMPLER_REDUCTION_MODE_MAX_EXT, - VK_SAMPLER_REDUCTION_MODE_RANGE_SIZE_EXT = (VK_SAMPLER_REDUCTION_MODE_MAX_EXT - VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE_EXT + 1), - VK_SAMPLER_REDUCTION_MODE_MAX_ENUM_EXT = 0x7FFFFFFF -} VkSamplerReductionModeEXT; - -typedef struct VkSamplerReductionModeCreateInfoEXT { - VkStructureType sType; - const void* pNext; - VkSamplerReductionModeEXT reductionMode; -} VkSamplerReductionModeCreateInfoEXT; - -typedef struct VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT { - VkStructureType sType; - void* pNext; - VkBool32 
filterMinmaxSingleComponentFormats; - VkBool32 filterMinmaxImageComponentMapping; -} VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT; - - - -#define VK_AMD_gpu_shader_int16 1 -#define VK_AMD_GPU_SHADER_INT16_SPEC_VERSION 1 -#define VK_AMD_GPU_SHADER_INT16_EXTENSION_NAME "VK_AMD_gpu_shader_int16" - - -#define VK_AMD_mixed_attachment_samples 1 -#define VK_AMD_MIXED_ATTACHMENT_SAMPLES_SPEC_VERSION 1 -#define VK_AMD_MIXED_ATTACHMENT_SAMPLES_EXTENSION_NAME "VK_AMD_mixed_attachment_samples" - - -#define VK_EXT_shader_stencil_export 1 -#define VK_EXT_SHADER_STENCIL_EXPORT_SPEC_VERSION 1 -#define VK_EXT_SHADER_STENCIL_EXPORT_EXTENSION_NAME "VK_EXT_shader_stencil_export" - - -#define VK_EXT_blend_operation_advanced 1 -#define VK_EXT_BLEND_OPERATION_ADVANCED_SPEC_VERSION 2 -#define VK_EXT_BLEND_OPERATION_ADVANCED_EXTENSION_NAME "VK_EXT_blend_operation_advanced" - - -typedef enum VkBlendOverlapEXT { - VK_BLEND_OVERLAP_UNCORRELATED_EXT = 0, - VK_BLEND_OVERLAP_DISJOINT_EXT = 1, - VK_BLEND_OVERLAP_CONJOINT_EXT = 2, - VK_BLEND_OVERLAP_BEGIN_RANGE_EXT = VK_BLEND_OVERLAP_UNCORRELATED_EXT, - VK_BLEND_OVERLAP_END_RANGE_EXT = VK_BLEND_OVERLAP_CONJOINT_EXT, - VK_BLEND_OVERLAP_RANGE_SIZE_EXT = (VK_BLEND_OVERLAP_CONJOINT_EXT - VK_BLEND_OVERLAP_UNCORRELATED_EXT + 1), - VK_BLEND_OVERLAP_MAX_ENUM_EXT = 0x7FFFFFFF -} VkBlendOverlapEXT; - -typedef struct VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT { - VkStructureType sType; - void* pNext; - VkBool32 advancedBlendCoherentOperations; -} VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT; - -typedef struct VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT { - VkStructureType sType; - void* pNext; - uint32_t advancedBlendMaxColorAttachments; - VkBool32 advancedBlendIndependentBlend; - VkBool32 advancedBlendNonPremultipliedSrcColor; - VkBool32 advancedBlendNonPremultipliedDstColor; - VkBool32 advancedBlendCorrelatedOverlap; - VkBool32 advancedBlendAllOperations; -} VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT; - -typedef struct VkPipelineColorBlendAdvancedStateCreateInfoEXT { - VkStructureType sType; - const void* pNext; - VkBool32 srcPremultiplied; - VkBool32 dstPremultiplied; - VkBlendOverlapEXT blendOverlap; -} VkPipelineColorBlendAdvancedStateCreateInfoEXT; - - - -#define VK_NV_fragment_coverage_to_color 1 -#define VK_NV_FRAGMENT_COVERAGE_TO_COLOR_SPEC_VERSION 1 -#define VK_NV_FRAGMENT_COVERAGE_TO_COLOR_EXTENSION_NAME "VK_NV_fragment_coverage_to_color" - -typedef VkFlags VkPipelineCoverageToColorStateCreateFlagsNV; - -typedef struct VkPipelineCoverageToColorStateCreateInfoNV { - VkStructureType sType; - const void* pNext; - VkPipelineCoverageToColorStateCreateFlagsNV flags; - VkBool32 coverageToColorEnable; - uint32_t coverageToColorLocation; -} VkPipelineCoverageToColorStateCreateInfoNV; - - - -#define VK_NV_framebuffer_mixed_samples 1 -#define VK_NV_FRAMEBUFFER_MIXED_SAMPLES_SPEC_VERSION 1 -#define VK_NV_FRAMEBUFFER_MIXED_SAMPLES_EXTENSION_NAME "VK_NV_framebuffer_mixed_samples" - - -typedef enum VkCoverageModulationModeNV { - VK_COVERAGE_MODULATION_MODE_NONE_NV = 0, - VK_COVERAGE_MODULATION_MODE_RGB_NV = 1, - VK_COVERAGE_MODULATION_MODE_ALPHA_NV = 2, - VK_COVERAGE_MODULATION_MODE_RGBA_NV = 3, - VK_COVERAGE_MODULATION_MODE_BEGIN_RANGE_NV = VK_COVERAGE_MODULATION_MODE_NONE_NV, - VK_COVERAGE_MODULATION_MODE_END_RANGE_NV = VK_COVERAGE_MODULATION_MODE_RGBA_NV, - VK_COVERAGE_MODULATION_MODE_RANGE_SIZE_NV = (VK_COVERAGE_MODULATION_MODE_RGBA_NV - VK_COVERAGE_MODULATION_MODE_NONE_NV + 1), - VK_COVERAGE_MODULATION_MODE_MAX_ENUM_NV = 0x7FFFFFFF -} 
VkCoverageModulationModeNV; - -typedef VkFlags VkPipelineCoverageModulationStateCreateFlagsNV; - -typedef struct VkPipelineCoverageModulationStateCreateInfoNV { - VkStructureType sType; - const void* pNext; - VkPipelineCoverageModulationStateCreateFlagsNV flags; - VkCoverageModulationModeNV coverageModulationMode; - VkBool32 coverageModulationTableEnable; - uint32_t coverageModulationTableCount; - const float* pCoverageModulationTable; -} VkPipelineCoverageModulationStateCreateInfoNV; - - - -#define VK_NV_fill_rectangle 1 -#define VK_NV_FILL_RECTANGLE_SPEC_VERSION 1 -#define VK_NV_FILL_RECTANGLE_EXTENSION_NAME "VK_NV_fill_rectangle" - - -#define VK_EXT_post_depth_coverage 1 -#define VK_EXT_POST_DEPTH_COVERAGE_SPEC_VERSION 1 -#define VK_EXT_POST_DEPTH_COVERAGE_EXTENSION_NAME "VK_EXT_post_depth_coverage" - - -#define VK_EXT_shader_viewport_index_layer 1 -#define VK_EXT_SHADER_VIEWPORT_INDEX_LAYER_SPEC_VERSION 1 -#define VK_EXT_SHADER_VIEWPORT_INDEX_LAYER_EXTENSION_NAME "VK_EXT_shader_viewport_index_layer" - - -#ifdef __cplusplus -} +#include "vulkan_macos.h" #endif + +#ifdef VK_USE_PLATFORM_VI_NN +#include "vulkan_vi.h" #endif + + +#ifdef VK_USE_PLATFORM_WAYLAND_KHR +#include +#include "vulkan_wayland.h" +#endif + + +#ifdef VK_USE_PLATFORM_WIN32_KHR +#include +#include "vulkan_win32.h" +#endif + + +#ifdef VK_USE_PLATFORM_XCB_KHR +#include +#include "vulkan_xcb.h" +#endif + + +#ifdef VK_USE_PLATFORM_XLIB_KHR +#include +#include "vulkan_xlib.h" +#endif + + +#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT +#include +#include +#include "vulkan_xlib_xrandr.h" +#endif + +#endif // VULKAN_H_ diff --git a/src/video/khronos/vulkan/vulkan.hpp b/src/video/khronos/vulkan/vulkan.hpp new file mode 100644 index 000000000..bdc7e4d58 --- /dev/null +++ b/src/video/khronos/vulkan/vulkan.hpp @@ -0,0 +1,53056 @@ +// Copyright (c) 2015-2018 The Khronos Group Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ---- Exceptions to the Apache 2.0 License: ---- +// +// As an exception, if you use this Software to generate code and portions of +// this Software are embedded into the generated code as a result, you may +// redistribute such product without providing attribution as would otherwise +// be required by Sections 4(a), 4(b) and 4(d) of the License. +// +// In addition, if you combine or link code generated by this Software with +// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1 +// ("`Combined Software`") and if a court of competent jurisdiction determines +// that the patent provision (Section 3), the indemnity provision (Section 9) +// or other Section of the License conflicts with the conditions of the +// applicable GPL or LGPL license, you may retroactively and prospectively +// choose to deem waived or otherwise exclude such Section(s) of the License, +// but only in their entirety and only with respect to the Combined Software. +// + +// This header is generated from the Khronos Vulkan XML API Registry. 
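The platform-specific blocks that close out vulkan.h above only take effect when the application defines the matching VK_USE_PLATFORM_* macro before including the header; that is what pulls in the corresponding vulkan_<platform>.h wrapper and the native system header. A hedged consumer-side sketch for XCB follows; create_xcb_surface is a hypothetical helper, while the xcb types, VkXcbSurfaceCreateInfoKHR, and vkCreateXcbSurfaceKHR are standard VK_KHR_xcb_surface identifiers rather than anything added by this patch.

/* Must be defined before the include so vulkan.h selects the XCB binding
 * (which also brings in the native xcb header). */
#define VK_USE_PLATFORM_XCB_KHR
#include <vulkan/vulkan.h>

VkSurfaceKHR create_xcb_surface(VkInstance instance,
                                xcb_connection_t *connection,
                                xcb_window_t window)
{
    VkXcbSurfaceCreateInfoKHR info = { VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR };
    info.connection = connection;   /* native connection/window come from the application */
    info.window = window;

    VkSurfaceKHR surface = VK_NULL_HANDLE;
    /* The instance must have VK_KHR_surface and VK_KHR_xcb_surface enabled. */
    if (vkCreateXcbSurfaceKHR(instance, &info, NULL, &surface) != VK_SUCCESS) {
        return VK_NULL_HANDLE;
    }
    return surface;
}

The same pattern applies to the other guarded blocks above (Wayland, Win32, Xlib, Xlib/XRandR, Fuchsia, iOS, macOS, VI): define the macro, include vulkan.h, and that platform's surface-creation entry points become visible.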
+ +#ifndef VULKAN_HPP +#define VULKAN_HPP + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE +# include +# include +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#if !defined(VULKAN_HPP_ASSERT) +# include +# define VULKAN_HPP_ASSERT assert +#endif + +// includes through some other header +// this results in major(x) being resolved to gnu_dev_major(x) +// which is an expression in a constructor initializer list. +#if defined(major) + #undef major +#endif +#if defined(minor) + #undef minor +#endif + +// Windows defines MemoryBarrier which is deprecated and collides +// with the vk::MemoryBarrier struct. +#ifdef MemoryBarrier + #undef MemoryBarrier +#endif + +static_assert( VK_HEADER_VERSION == 91 , "Wrong VK_HEADER_VERSION!" ); + +// 32-bit vulkan is not typesafe for handles, so don't allow copy constructors on this platform by default. +// To enable this feature on 32-bit platforms please define VULKAN_HPP_TYPESAFE_CONVERSION +#if defined(__LP64__) || defined(_WIN64) || (defined(__x86_64__) && !defined(__ILP32__) ) || defined(_M_X64) || defined(__ia64) || defined (_M_IA64) || defined(__aarch64__) || defined(__powerpc64__) +# if !defined( VULKAN_HPP_TYPESAFE_CONVERSION ) +# define VULKAN_HPP_TYPESAFE_CONVERSION +# endif +#endif + +#if !defined(VULKAN_HPP_HAS_UNRESTRICTED_UNIONS) +# if defined(__clang__) +# if __has_feature(cxx_unrestricted_unions) +# define VULKAN_HPP_HAS_UNRESTRICTED_UNIONS +# endif +# elif defined(__GNUC__) +# define GCC_VERSION (__GNUC__ * 10000 + __GNUC_MINOR__ * 100 + __GNUC_PATCHLEVEL__) +# if 40600 <= GCC_VERSION +# define VULKAN_HPP_HAS_UNRESTRICTED_UNIONS +# endif +# elif defined(_MSC_VER) +# if 1900 <= _MSC_VER +# define VULKAN_HPP_HAS_UNRESTRICTED_UNIONS +# endif +# endif +#endif + +#if !defined(VULKAN_HPP_INLINE) +# if defined(__clang___) +# if __has_attribute(always_inline) +# define VULKAN_HPP_INLINE __attribute__((always_inline)) __inline__ +# else +# define VULKAN_HPP_INLINE inline +# endif +# elif defined(__GNUC__) +# define VULKAN_HPP_INLINE __attribute__((always_inline)) __inline__ +# elif defined(_MSC_VER) +# define VULKAN_HPP_INLINE inline +# else +# define VULKAN_HPP_INLINE inline +# endif +#endif + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) +# define VULKAN_HPP_TYPESAFE_EXPLICIT +#else +# define VULKAN_HPP_TYPESAFE_EXPLICIT explicit +#endif + +#if defined(_MSC_VER) && (_MSC_VER <= 1800) +# define VULKAN_HPP_CONSTEXPR +#else +# define VULKAN_HPP_CONSTEXPR constexpr +#endif + + +#if !defined(VULKAN_HPP_NAMESPACE) +#define VULKAN_HPP_NAMESPACE vk +#endif + +#define VULKAN_HPP_STRINGIFY2(text) #text +#define VULKAN_HPP_STRINGIFY(text) VULKAN_HPP_STRINGIFY2(text) +#define VULKAN_HPP_NAMESPACE_STRING VULKAN_HPP_STRINGIFY(VULKAN_HPP_NAMESPACE) + +namespace VULKAN_HPP_NAMESPACE +{ + + template struct FlagTraits + { + enum { allFlags = 0 }; + }; + + template + class Flags + { + public: + VULKAN_HPP_CONSTEXPR Flags() + : m_mask(0) + { + } + + Flags(BitType bit) + : m_mask(static_cast(bit)) + { + } + + Flags(Flags const& rhs) + : m_mask(rhs.m_mask) + { + } + + explicit Flags(MaskType flags) + : m_mask(flags) + { + } + + Flags & operator=(Flags const& rhs) + { + m_mask = rhs.m_mask; + return *this; + } + + Flags & operator|=(Flags const& rhs) + { + m_mask |= rhs.m_mask; + return *this; + } + + Flags & operator&=(Flags const& rhs) + { + m_mask &= rhs.m_mask; + return *this; + } + + Flags & operator^=(Flags const& rhs) + { + m_mask ^= rhs.m_mask; + return *this; + 
} + + Flags operator|(Flags const& rhs) const + { + Flags result(*this); + result |= rhs; + return result; + } + + Flags operator&(Flags const& rhs) const + { + Flags result(*this); + result &= rhs; + return result; + } + + Flags operator^(Flags const& rhs) const + { + Flags result(*this); + result ^= rhs; + return result; + } + + bool operator!() const + { + return !m_mask; + } + + Flags operator~() const + { + Flags result(*this); + result.m_mask ^= FlagTraits::allFlags; + return result; + } + + bool operator==(Flags const& rhs) const + { + return m_mask == rhs.m_mask; + } + + bool operator!=(Flags const& rhs) const + { + return m_mask != rhs.m_mask; + } + + explicit operator bool() const + { + return !!m_mask; + } + + explicit operator MaskType() const + { + return m_mask; + } + + private: + MaskType m_mask; + }; + + template + Flags operator|(BitType bit, Flags const& flags) + { + return flags | bit; + } + + template + Flags operator&(BitType bit, Flags const& flags) + { + return flags & bit; + } + + template + Flags operator^(BitType bit, Flags const& flags) + { + return flags ^ bit; + } + + + template + class Optional + { + public: + Optional(RefType & reference) { m_ptr = &reference; } + Optional(RefType * ptr) { m_ptr = ptr; } + Optional(std::nullptr_t) { m_ptr = nullptr; } + + operator RefType*() const { return m_ptr; } + RefType const* operator->() const { return m_ptr; } + explicit operator bool() const { return !!m_ptr; } + + private: + RefType *m_ptr; + }; + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + class ArrayProxy + { + public: + VULKAN_HPP_CONSTEXPR ArrayProxy(std::nullptr_t) + : m_count(0) + , m_ptr(nullptr) + {} + + ArrayProxy(T & ptr) + : m_count(1) + , m_ptr(&ptr) + {} + + ArrayProxy(uint32_t count, T * ptr) + : m_count(count) + , m_ptr(ptr) + {} + + template + ArrayProxy(std::array::type, N> & data) + : m_count(N) + , m_ptr(data.data()) + {} + + template + ArrayProxy(std::array::type, N> const& data) + : m_count(N) + , m_ptr(data.data()) + {} + + template ::type>> + ArrayProxy(std::vector::type, Allocator> & data) + : m_count(static_cast(data.size())) + , m_ptr(data.data()) + {} + + template ::type>> + ArrayProxy(std::vector::type, Allocator> const& data) + : m_count(static_cast(data.size())) + , m_ptr(data.data()) + {} + + ArrayProxy(std::initializer_list const& data) + : m_count(static_cast(data.end() - data.begin())) + , m_ptr(data.begin()) + {} + + const T * begin() const + { + return m_ptr; + } + + const T * end() const + { + return m_ptr + m_count; + } + + const T & front() const + { + VULKAN_HPP_ASSERT(m_count && m_ptr); + return *m_ptr; + } + + const T & back() const + { + VULKAN_HPP_ASSERT(m_count && m_ptr); + return *(m_ptr + m_count - 1); + } + + bool empty() const + { + return (m_count == 0); + } + + uint32_t size() const + { + return m_count; + } + + T * data() const + { + return m_ptr; + } + + private: + uint32_t m_count; + T * m_ptr; + }; +#endif + +#ifndef VULKAN_HPP_NO_SMART_HANDLE + + template class UniqueHandleTraits; + + template + class UniqueHandle : public UniqueHandleTraits::deleter + { + private: + using Deleter = typename UniqueHandleTraits::deleter; + public: + explicit UniqueHandle( Type const& value = Type(), Deleter const& deleter = Deleter() ) + : Deleter( deleter) + , m_value( value ) + {} + + UniqueHandle( UniqueHandle const& ) = delete; + + UniqueHandle( UniqueHandle && other ) + : Deleter( std::move( static_cast( other ) ) ) + , m_value( other.release() ) + {} + + ~UniqueHandle() + { + if ( m_value ) this->destroy( 
m_value ); + } + + UniqueHandle & operator=( UniqueHandle const& ) = delete; + + UniqueHandle & operator=( UniqueHandle && other ) + { + reset( other.release() ); + *static_cast(this) = std::move( static_cast(other) ); + return *this; + } + + explicit operator bool() const + { + return m_value.operator bool(); + } + + Type const* operator->() const + { + return &m_value; + } + + Type * operator->() + { + return &m_value; + } + + Type const& operator*() const + { + return m_value; + } + + Type & operator*() + { + return m_value; + } + + const Type & get() const + { + return m_value; + } + + Type & get() + { + return m_value; + } + + void reset( Type const& value = Type() ) + { + if ( m_value != value ) + { + if ( m_value ) this->destroy( m_value ); + m_value = value; + } + } + + Type release() + { + Type value = m_value; + m_value = nullptr; + return value; + } + + void swap( UniqueHandle & rhs ) + { + std::swap(m_value, rhs.m_value); + std::swap(static_cast(*this), static_cast(rhs)); + } + + private: + Type m_value; + }; + + template + VULKAN_HPP_INLINE void swap( UniqueHandle & lhs, UniqueHandle & rhs ) + { + lhs.swap( rhs ); + } +#endif + + + + template struct isStructureChainValid { enum { value = false }; }; + + template + struct TypeList + { + using list = P; + using last = T; + }; + + template + struct extendCheck + { + static const bool valid = isStructureChainValid::value || extendCheck::valid; + }; + + template + struct extendCheck,X> + { + static const bool valid = isStructureChainValid::value; + }; + + template + struct extendCheck + { + static const bool valid = true; + }; + + template + class StructureChainElement + { + public: + explicit operator Element&() { return value; } + explicit operator const Element&() const { return value; } + private: + Element value; + }; + + template + class StructureChain : private StructureChainElement... + { + public: + StructureChain() + { + link(); + } + + StructureChain(StructureChain const &rhs) + { + linkAndCopy(rhs); + } + + StructureChain(StructureElements const &... elems) + { + linkAndCopyElements(elems...); + } + + StructureChain& operator=(StructureChain const &rhs) + { + linkAndCopy(rhs); + return *this; + } + + template ClassType& get() { return static_cast(*this);} + + private: + template + void link() + { + static_assert(extendCheck::valid, "The structure chain is not valid!"); + } + + template + void link() + { + static_assert(extendCheck::valid, "The structure chain is not valid!"); + X& x = static_cast(*this); + Y& y = static_cast(*this); + x.pNext = &y; + link, Y, Z...>(); + } + + template + void linkAndCopy(StructureChain const &rhs) + { + static_assert(extendCheck::valid, "The structure chain is not valid!"); + static_cast(*this) = static_cast(rhs); + } + + template + void linkAndCopy(StructureChain const &rhs) + { + static_assert(extendCheck::valid, "The structure chain is not valid!"); + X& x = static_cast(*this); + Y& y = static_cast(*this); + x = static_cast(rhs); + x.pNext = &y; + linkAndCopy, Y, Z...>(rhs); + } + + template + void linkAndCopyElements(X const &xelem) + { + static_assert(extendCheck::valid, "The structure chain is not valid!"); + static_cast(*this) = xelem; + } + + template + void linkAndCopyElements(X const &xelem, Y const &yelem, Z const &... 
zelem) + { + static_assert(extendCheck::valid, "The structure chain is not valid!"); + X& x = static_cast(*this); + Y& y = static_cast(*this); + x = xelem; + x.pNext = &y; + linkAndCopyElements, Y, Z...>(yelem, zelem...); + } + }; + + enum class Result + { + eSuccess = VK_SUCCESS, + eNotReady = VK_NOT_READY, + eTimeout = VK_TIMEOUT, + eEventSet = VK_EVENT_SET, + eEventReset = VK_EVENT_RESET, + eIncomplete = VK_INCOMPLETE, + eErrorOutOfHostMemory = VK_ERROR_OUT_OF_HOST_MEMORY, + eErrorOutOfDeviceMemory = VK_ERROR_OUT_OF_DEVICE_MEMORY, + eErrorInitializationFailed = VK_ERROR_INITIALIZATION_FAILED, + eErrorDeviceLost = VK_ERROR_DEVICE_LOST, + eErrorMemoryMapFailed = VK_ERROR_MEMORY_MAP_FAILED, + eErrorLayerNotPresent = VK_ERROR_LAYER_NOT_PRESENT, + eErrorExtensionNotPresent = VK_ERROR_EXTENSION_NOT_PRESENT, + eErrorFeatureNotPresent = VK_ERROR_FEATURE_NOT_PRESENT, + eErrorIncompatibleDriver = VK_ERROR_INCOMPATIBLE_DRIVER, + eErrorTooManyObjects = VK_ERROR_TOO_MANY_OBJECTS, + eErrorFormatNotSupported = VK_ERROR_FORMAT_NOT_SUPPORTED, + eErrorFragmentedPool = VK_ERROR_FRAGMENTED_POOL, + eErrorOutOfPoolMemory = VK_ERROR_OUT_OF_POOL_MEMORY, + eErrorOutOfPoolMemoryKHR = VK_ERROR_OUT_OF_POOL_MEMORY, + eErrorInvalidExternalHandle = VK_ERROR_INVALID_EXTERNAL_HANDLE, + eErrorInvalidExternalHandleKHR = VK_ERROR_INVALID_EXTERNAL_HANDLE, + eErrorSurfaceLostKHR = VK_ERROR_SURFACE_LOST_KHR, + eErrorNativeWindowInUseKHR = VK_ERROR_NATIVE_WINDOW_IN_USE_KHR, + eSuboptimalKHR = VK_SUBOPTIMAL_KHR, + eErrorOutOfDateKHR = VK_ERROR_OUT_OF_DATE_KHR, + eErrorIncompatibleDisplayKHR = VK_ERROR_INCOMPATIBLE_DISPLAY_KHR, + eErrorValidationFailedEXT = VK_ERROR_VALIDATION_FAILED_EXT, + eErrorInvalidShaderNV = VK_ERROR_INVALID_SHADER_NV, + eErrorInvalidDrmFormatModifierPlaneLayoutEXT = VK_ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT, + eErrorFragmentationEXT = VK_ERROR_FRAGMENTATION_EXT, + eErrorNotPermittedEXT = VK_ERROR_NOT_PERMITTED_EXT + }; + + VULKAN_HPP_INLINE std::string to_string(Result value) + { + switch (value) + { + case Result::eSuccess: return "Success"; + case Result::eNotReady: return "NotReady"; + case Result::eTimeout: return "Timeout"; + case Result::eEventSet: return "EventSet"; + case Result::eEventReset: return "EventReset"; + case Result::eIncomplete: return "Incomplete"; + case Result::eErrorOutOfHostMemory: return "ErrorOutOfHostMemory"; + case Result::eErrorOutOfDeviceMemory: return "ErrorOutOfDeviceMemory"; + case Result::eErrorInitializationFailed: return "ErrorInitializationFailed"; + case Result::eErrorDeviceLost: return "ErrorDeviceLost"; + case Result::eErrorMemoryMapFailed: return "ErrorMemoryMapFailed"; + case Result::eErrorLayerNotPresent: return "ErrorLayerNotPresent"; + case Result::eErrorExtensionNotPresent: return "ErrorExtensionNotPresent"; + case Result::eErrorFeatureNotPresent: return "ErrorFeatureNotPresent"; + case Result::eErrorIncompatibleDriver: return "ErrorIncompatibleDriver"; + case Result::eErrorTooManyObjects: return "ErrorTooManyObjects"; + case Result::eErrorFormatNotSupported: return "ErrorFormatNotSupported"; + case Result::eErrorFragmentedPool: return "ErrorFragmentedPool"; + case Result::eErrorOutOfPoolMemory: return "ErrorOutOfPoolMemory"; + case Result::eErrorInvalidExternalHandle: return "ErrorInvalidExternalHandle"; + case Result::eErrorSurfaceLostKHR: return "ErrorSurfaceLostKHR"; + case Result::eErrorNativeWindowInUseKHR: return "ErrorNativeWindowInUseKHR"; + case Result::eSuboptimalKHR: return "SuboptimalKHR"; + case Result::eErrorOutOfDateKHR: return 
"ErrorOutOfDateKHR"; + case Result::eErrorIncompatibleDisplayKHR: return "ErrorIncompatibleDisplayKHR"; + case Result::eErrorValidationFailedEXT: return "ErrorValidationFailedEXT"; + case Result::eErrorInvalidShaderNV: return "ErrorInvalidShaderNV"; + case Result::eErrorInvalidDrmFormatModifierPlaneLayoutEXT: return "ErrorInvalidDrmFormatModifierPlaneLayoutEXT"; + case Result::eErrorFragmentationEXT: return "ErrorFragmentationEXT"; + case Result::eErrorNotPermittedEXT: return "ErrorNotPermittedEXT"; + default: return "invalid"; + } + } + +#ifndef VULKAN_HPP_NO_EXCEPTIONS +#if defined(_MSC_VER) && (_MSC_VER == 1800) +# define noexcept _NOEXCEPT +#endif + + class ErrorCategoryImpl : public std::error_category + { + public: + virtual const char* name() const noexcept override { return VULKAN_HPP_NAMESPACE_STRING"::Result"; } + virtual std::string message(int ev) const override { return to_string(static_cast(ev)); } + }; + +#if defined(_MSC_VER) && (_MSC_VER == 1800) +# undef noexcept +#endif + + VULKAN_HPP_INLINE const std::error_category& errorCategory() + { + static ErrorCategoryImpl instance; + return instance; + } + + VULKAN_HPP_INLINE std::error_code make_error_code(Result e) + { + return std::error_code(static_cast(e), errorCategory()); + } + + VULKAN_HPP_INLINE std::error_condition make_error_condition(Result e) + { + return std::error_condition(static_cast(e), errorCategory()); + } + +#if defined(_MSC_VER) && (_MSC_VER == 1800) +# define noexcept _NOEXCEPT +#endif + + class Error + { + public: + virtual ~Error() = default; + + virtual const char* what() const noexcept = 0; + }; + + class LogicError : public Error, public std::logic_error + { + public: + explicit LogicError( const std::string& what ) + : Error(), std::logic_error(what) {} + explicit LogicError( char const * what ) + : Error(), std::logic_error(what) {} + virtual ~LogicError() = default; + + virtual const char* what() const noexcept { return std::logic_error::what(); } + }; + + class SystemError : public Error, public std::system_error + { + public: + SystemError( std::error_code ec ) + : Error(), std::system_error(ec) {} + SystemError( std::error_code ec, std::string const& what ) + : Error(), std::system_error(ec, what) {} + SystemError( std::error_code ec, char const * what ) + : Error(), std::system_error(ec, what) {} + SystemError( int ev, std::error_category const& ecat ) + : Error(), std::system_error(ev, ecat) {} + SystemError( int ev, std::error_category const& ecat, std::string const& what) + : Error(), std::system_error(ev, ecat, what) {} + SystemError( int ev, std::error_category const& ecat, char const * what) + : Error(), std::system_error(ev, ecat, what) {} + virtual ~SystemError() = default; + + virtual const char* what() const noexcept { return std::system_error::what(); } + }; + +#if defined(_MSC_VER) && (_MSC_VER == 1800) +# undef noexcept +#endif + + class OutOfHostMemoryError : public SystemError + { + public: + OutOfHostMemoryError( std::string const& message ) + : SystemError( make_error_code( Result::eErrorOutOfHostMemory ), message ) {} + OutOfHostMemoryError( char const * message ) + : SystemError( make_error_code( Result::eErrorOutOfHostMemory ), message ) {} + }; + class OutOfDeviceMemoryError : public SystemError + { + public: + OutOfDeviceMemoryError( std::string const& message ) + : SystemError( make_error_code( Result::eErrorOutOfDeviceMemory ), message ) {} + OutOfDeviceMemoryError( char const * message ) + : SystemError( make_error_code( Result::eErrorOutOfDeviceMemory ), message ) {} + 
}; + class InitializationFailedError : public SystemError + { + public: + InitializationFailedError( std::string const& message ) + : SystemError( make_error_code( Result::eErrorInitializationFailed ), message ) {} + InitializationFailedError( char const * message ) + : SystemError( make_error_code( Result::eErrorInitializationFailed ), message ) {} + }; + class DeviceLostError : public SystemError + { + public: + DeviceLostError( std::string const& message ) + : SystemError( make_error_code( Result::eErrorDeviceLost ), message ) {} + DeviceLostError( char const * message ) + : SystemError( make_error_code( Result::eErrorDeviceLost ), message ) {} + }; + class MemoryMapFailedError : public SystemError + { + public: + MemoryMapFailedError( std::string const& message ) + : SystemError( make_error_code( Result::eErrorMemoryMapFailed ), message ) {} + MemoryMapFailedError( char const * message ) + : SystemError( make_error_code( Result::eErrorMemoryMapFailed ), message ) {} + }; + class LayerNotPresentError : public SystemError + { + public: + LayerNotPresentError( std::string const& message ) + : SystemError( make_error_code( Result::eErrorLayerNotPresent ), message ) {} + LayerNotPresentError( char const * message ) + : SystemError( make_error_code( Result::eErrorLayerNotPresent ), message ) {} + }; + class ExtensionNotPresentError : public SystemError + { + public: + ExtensionNotPresentError( std::string const& message ) + : SystemError( make_error_code( Result::eErrorExtensionNotPresent ), message ) {} + ExtensionNotPresentError( char const * message ) + : SystemError( make_error_code( Result::eErrorExtensionNotPresent ), message ) {} + }; + class FeatureNotPresentError : public SystemError + { + public: + FeatureNotPresentError( std::string const& message ) + : SystemError( make_error_code( Result::eErrorFeatureNotPresent ), message ) {} + FeatureNotPresentError( char const * message ) + : SystemError( make_error_code( Result::eErrorFeatureNotPresent ), message ) {} + }; + class IncompatibleDriverError : public SystemError + { + public: + IncompatibleDriverError( std::string const& message ) + : SystemError( make_error_code( Result::eErrorIncompatibleDriver ), message ) {} + IncompatibleDriverError( char const * message ) + : SystemError( make_error_code( Result::eErrorIncompatibleDriver ), message ) {} + }; + class TooManyObjectsError : public SystemError + { + public: + TooManyObjectsError( std::string const& message ) + : SystemError( make_error_code( Result::eErrorTooManyObjects ), message ) {} + TooManyObjectsError( char const * message ) + : SystemError( make_error_code( Result::eErrorTooManyObjects ), message ) {} + }; + class FormatNotSupportedError : public SystemError + { + public: + FormatNotSupportedError( std::string const& message ) + : SystemError( make_error_code( Result::eErrorFormatNotSupported ), message ) {} + FormatNotSupportedError( char const * message ) + : SystemError( make_error_code( Result::eErrorFormatNotSupported ), message ) {} + }; + class FragmentedPoolError : public SystemError + { + public: + FragmentedPoolError( std::string const& message ) + : SystemError( make_error_code( Result::eErrorFragmentedPool ), message ) {} + FragmentedPoolError( char const * message ) + : SystemError( make_error_code( Result::eErrorFragmentedPool ), message ) {} + }; + class OutOfPoolMemoryError : public SystemError + { + public: + OutOfPoolMemoryError( std::string const& message ) + : SystemError( make_error_code( Result::eErrorOutOfPoolMemory ), message ) {} + 
OutOfPoolMemoryError( char const * message ) + : SystemError( make_error_code( Result::eErrorOutOfPoolMemory ), message ) {} + }; + class InvalidExternalHandleError : public SystemError + { + public: + InvalidExternalHandleError( std::string const& message ) + : SystemError( make_error_code( Result::eErrorInvalidExternalHandle ), message ) {} + InvalidExternalHandleError( char const * message ) + : SystemError( make_error_code( Result::eErrorInvalidExternalHandle ), message ) {} + }; + class SurfaceLostKHRError : public SystemError + { + public: + SurfaceLostKHRError( std::string const& message ) + : SystemError( make_error_code( Result::eErrorSurfaceLostKHR ), message ) {} + SurfaceLostKHRError( char const * message ) + : SystemError( make_error_code( Result::eErrorSurfaceLostKHR ), message ) {} + }; + class NativeWindowInUseKHRError : public SystemError + { + public: + NativeWindowInUseKHRError( std::string const& message ) + : SystemError( make_error_code( Result::eErrorNativeWindowInUseKHR ), message ) {} + NativeWindowInUseKHRError( char const * message ) + : SystemError( make_error_code( Result::eErrorNativeWindowInUseKHR ), message ) {} + }; + class OutOfDateKHRError : public SystemError + { + public: + OutOfDateKHRError( std::string const& message ) + : SystemError( make_error_code( Result::eErrorOutOfDateKHR ), message ) {} + OutOfDateKHRError( char const * message ) + : SystemError( make_error_code( Result::eErrorOutOfDateKHR ), message ) {} + }; + class IncompatibleDisplayKHRError : public SystemError + { + public: + IncompatibleDisplayKHRError( std::string const& message ) + : SystemError( make_error_code( Result::eErrorIncompatibleDisplayKHR ), message ) {} + IncompatibleDisplayKHRError( char const * message ) + : SystemError( make_error_code( Result::eErrorIncompatibleDisplayKHR ), message ) {} + }; + class ValidationFailedEXTError : public SystemError + { + public: + ValidationFailedEXTError( std::string const& message ) + : SystemError( make_error_code( Result::eErrorValidationFailedEXT ), message ) {} + ValidationFailedEXTError( char const * message ) + : SystemError( make_error_code( Result::eErrorValidationFailedEXT ), message ) {} + }; + class InvalidShaderNVError : public SystemError + { + public: + InvalidShaderNVError( std::string const& message ) + : SystemError( make_error_code( Result::eErrorInvalidShaderNV ), message ) {} + InvalidShaderNVError( char const * message ) + : SystemError( make_error_code( Result::eErrorInvalidShaderNV ), message ) {} + }; + class InvalidDrmFormatModifierPlaneLayoutEXTError : public SystemError + { + public: + InvalidDrmFormatModifierPlaneLayoutEXTError( std::string const& message ) + : SystemError( make_error_code( Result::eErrorInvalidDrmFormatModifierPlaneLayoutEXT ), message ) {} + InvalidDrmFormatModifierPlaneLayoutEXTError( char const * message ) + : SystemError( make_error_code( Result::eErrorInvalidDrmFormatModifierPlaneLayoutEXT ), message ) {} + }; + class FragmentationEXTError : public SystemError + { + public: + FragmentationEXTError( std::string const& message ) + : SystemError( make_error_code( Result::eErrorFragmentationEXT ), message ) {} + FragmentationEXTError( char const * message ) + : SystemError( make_error_code( Result::eErrorFragmentationEXT ), message ) {} + }; + class NotPermittedEXTError : public SystemError + { + public: + NotPermittedEXTError( std::string const& message ) + : SystemError( make_error_code( Result::eErrorNotPermittedEXT ), message ) {} + NotPermittedEXTError( char const * message ) + : 
SystemError( make_error_code( Result::eErrorNotPermittedEXT ), message ) {} + }; + + VULKAN_HPP_INLINE void throwResultException( Result result, char const * message ) + { + switch ( result ) + { + case Result::eErrorOutOfHostMemory: throw OutOfHostMemoryError ( message ); + case Result::eErrorOutOfDeviceMemory: throw OutOfDeviceMemoryError ( message ); + case Result::eErrorInitializationFailed: throw InitializationFailedError ( message ); + case Result::eErrorDeviceLost: throw DeviceLostError ( message ); + case Result::eErrorMemoryMapFailed: throw MemoryMapFailedError ( message ); + case Result::eErrorLayerNotPresent: throw LayerNotPresentError ( message ); + case Result::eErrorExtensionNotPresent: throw ExtensionNotPresentError ( message ); + case Result::eErrorFeatureNotPresent: throw FeatureNotPresentError ( message ); + case Result::eErrorIncompatibleDriver: throw IncompatibleDriverError ( message ); + case Result::eErrorTooManyObjects: throw TooManyObjectsError ( message ); + case Result::eErrorFormatNotSupported: throw FormatNotSupportedError ( message ); + case Result::eErrorFragmentedPool: throw FragmentedPoolError ( message ); + case Result::eErrorOutOfPoolMemory: throw OutOfPoolMemoryError ( message ); + case Result::eErrorInvalidExternalHandle: throw InvalidExternalHandleError ( message ); + case Result::eErrorSurfaceLostKHR: throw SurfaceLostKHRError ( message ); + case Result::eErrorNativeWindowInUseKHR: throw NativeWindowInUseKHRError ( message ); + case Result::eErrorOutOfDateKHR: throw OutOfDateKHRError ( message ); + case Result::eErrorIncompatibleDisplayKHR: throw IncompatibleDisplayKHRError ( message ); + case Result::eErrorValidationFailedEXT: throw ValidationFailedEXTError ( message ); + case Result::eErrorInvalidShaderNV: throw InvalidShaderNVError ( message ); + case Result::eErrorInvalidDrmFormatModifierPlaneLayoutEXT: throw InvalidDrmFormatModifierPlaneLayoutEXTError ( message ); + case Result::eErrorFragmentationEXT: throw FragmentationEXTError ( message ); + case Result::eErrorNotPermittedEXT: throw NotPermittedEXTError ( message ); + default: throw SystemError( make_error_code( result ) ); + } + } +#endif +} // namespace VULKAN_HPP_NAMESPACE + +namespace std +{ + template <> + struct is_error_code_enum : public true_type + {}; +} + +namespace VULKAN_HPP_NAMESPACE +{ + + template + struct ResultValue + { + ResultValue( Result r, T & v ) + : result( r ) + , value( v ) + {} + + ResultValue( Result r, T && v ) + : result( r ) + , value( std::move( v ) ) + {} + + Result result; + T value; + + operator std::tuple() { return std::tuple(result, value); } + }; + + template + struct ResultValueType + { +#ifdef VULKAN_HPP_NO_EXCEPTIONS + typedef ResultValue type; +#else + typedef T type; +#endif + }; + + template <> + struct ResultValueType + { +#ifdef VULKAN_HPP_NO_EXCEPTIONS + typedef Result type; +#else + typedef void type; +#endif + }; + + VULKAN_HPP_INLINE ResultValueType::type createResultValue( Result result, char const * message ) + { +#ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( result == Result::eSuccess ); + return result; +#else + if ( result != Result::eSuccess ) + { + throwResultException( result, message ); + } +#endif + } + + template + VULKAN_HPP_INLINE typename ResultValueType::type createResultValue( Result result, T & data, char const * message ) + { +#ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( result == Result::eSuccess ); + return ResultValue( result, data ); +#else + if ( result != Result::eSuccess ) + { + throwResultException( 
result, message ); + } + return std::move( data ); +#endif + } + + VULKAN_HPP_INLINE Result createResultValue( Result result, char const * message, std::initializer_list<Result> successCodes ) + { +#ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( std::find( successCodes.begin(), successCodes.end(), result ) != successCodes.end() ); +#else + if ( std::find( successCodes.begin(), successCodes.end(), result ) == successCodes.end() ) + { + throwResultException( result, message ); + } +#endif + return result; + } + + template <typename T> + VULKAN_HPP_INLINE ResultValue<T> createResultValue( Result result, T & data, char const * message, std::initializer_list<Result> successCodes ) + { +#ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( std::find( successCodes.begin(), successCodes.end(), result ) != successCodes.end() ); +#else + if ( std::find( successCodes.begin(), successCodes.end(), result ) == successCodes.end() ) + { + throwResultException( result, message ); + } +#endif + return ResultValue<T>( result, data ); + } + +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template <typename T, typename D> + VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<T,D>>::type createResultValue( Result result, T & data, char const * message, typename UniqueHandleTraits<T,D>::deleter const& deleter ) + { +#ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( result == Result::eSuccess ); + return ResultValue<UniqueHandle<T,D>>( result, UniqueHandle<T,D>(data, deleter) ); +#else + if ( result != Result::eSuccess ) + { + throwResultException( result, message ); + } + return UniqueHandle<T,D>(data, deleter); +#endif + } +#endif + +class DispatchLoaderStatic +{ +public: + VkResult vkAcquireNextImage2KHR( VkDevice device, const VkAcquireNextImageInfoKHR* pAcquireInfo, uint32_t* pImageIndex ) const + { + return ::vkAcquireNextImage2KHR( device, pAcquireInfo, pImageIndex); + } + VkResult vkAcquireNextImageKHR( VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout, VkSemaphore semaphore, VkFence fence, uint32_t* pImageIndex ) const + { + return ::vkAcquireNextImageKHR( device, swapchain, timeout, semaphore, fence, pImageIndex); + } +#ifdef VK_USE_PLATFORM_XLIB_XRANDR_NV + VkResult vkAcquireXlibDisplayEXT( VkPhysicalDevice physicalDevice, Display* dpy, VkDisplayKHR display ) const + { + return ::vkAcquireXlibDisplayEXT( physicalDevice, dpy, display); + } +#endif /*VK_USE_PLATFORM_XLIB_XRANDR_NV*/ + VkResult vkAllocateCommandBuffers( VkDevice device, const VkCommandBufferAllocateInfo* pAllocateInfo, VkCommandBuffer* pCommandBuffers ) const + { + return ::vkAllocateCommandBuffers( device, pAllocateInfo, pCommandBuffers); + } + VkResult vkAllocateDescriptorSets( VkDevice device, const VkDescriptorSetAllocateInfo* pAllocateInfo, VkDescriptorSet* pDescriptorSets ) const + { + return ::vkAllocateDescriptorSets( device, pAllocateInfo, pDescriptorSets); + } + VkResult vkAllocateMemory( VkDevice device, const VkMemoryAllocateInfo* pAllocateInfo, const VkAllocationCallbacks* pAllocator, VkDeviceMemory* pMemory ) const + { + return ::vkAllocateMemory( device, pAllocateInfo, pAllocator, pMemory); + } + VkResult vkBeginCommandBuffer( VkCommandBuffer commandBuffer, const VkCommandBufferBeginInfo* pBeginInfo ) const + { + return ::vkBeginCommandBuffer( commandBuffer, pBeginInfo); + } + VkResult vkBindAccelerationStructureMemoryNV( VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoNV* pBindInfos ) const + { + return ::vkBindAccelerationStructureMemoryNV( device, bindInfoCount, pBindInfos); + } + VkResult vkBindBufferMemory( VkDevice device, VkBuffer buffer, VkDeviceMemory memory, VkDeviceSize
memoryOffset ) const + { + return ::vkBindBufferMemory( device, buffer, memory, memoryOffset); + } + VkResult vkBindBufferMemory2( VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo* pBindInfos ) const + { + return ::vkBindBufferMemory2( device, bindInfoCount, pBindInfos); + } + VkResult vkBindBufferMemory2KHR( VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo* pBindInfos ) const + { + return ::vkBindBufferMemory2KHR( device, bindInfoCount, pBindInfos); + } + VkResult vkBindImageMemory( VkDevice device, VkImage image, VkDeviceMemory memory, VkDeviceSize memoryOffset ) const + { + return ::vkBindImageMemory( device, image, memory, memoryOffset); + } + VkResult vkBindImageMemory2( VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo* pBindInfos ) const + { + return ::vkBindImageMemory2( device, bindInfoCount, pBindInfos); + } + VkResult vkBindImageMemory2KHR( VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo* pBindInfos ) const + { + return ::vkBindImageMemory2KHR( device, bindInfoCount, pBindInfos); + } + void vkCmdBeginConditionalRenderingEXT( VkCommandBuffer commandBuffer, const VkConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin ) const + { + return ::vkCmdBeginConditionalRenderingEXT( commandBuffer, pConditionalRenderingBegin); + } + void vkCmdBeginDebugUtilsLabelEXT( VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT* pLabelInfo ) const + { + return ::vkCmdBeginDebugUtilsLabelEXT( commandBuffer, pLabelInfo); + } + void vkCmdBeginQuery( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags ) const + { + return ::vkCmdBeginQuery( commandBuffer, queryPool, query, flags); + } + void vkCmdBeginQueryIndexedEXT( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags, uint32_t index ) const + { + return ::vkCmdBeginQueryIndexedEXT( commandBuffer, queryPool, query, flags, index); + } + void vkCmdBeginRenderPass( VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, VkSubpassContents contents ) const + { + return ::vkCmdBeginRenderPass( commandBuffer, pRenderPassBegin, contents); + } + void vkCmdBeginRenderPass2KHR( VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, const VkSubpassBeginInfoKHR* pSubpassBeginInfo ) const + { + return ::vkCmdBeginRenderPass2KHR( commandBuffer, pRenderPassBegin, pSubpassBeginInfo); + } + void vkCmdBeginTransformFeedbackEXT( VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VkBuffer* pCounterBuffers, const VkDeviceSize* pCounterBufferOffsets ) const + { + return ::vkCmdBeginTransformFeedbackEXT( commandBuffer, firstCounterBuffer, counterBufferCount, pCounterBuffers, pCounterBufferOffsets); + } + void vkCmdBindDescriptorSets( VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const VkDescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets ) const + { + return ::vkCmdBindDescriptorSets( commandBuffer, pipelineBindPoint, layout, firstSet, descriptorSetCount, pDescriptorSets, dynamicOffsetCount, pDynamicOffsets); + } + void vkCmdBindIndexBuffer( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkIndexType indexType ) const + { + return ::vkCmdBindIndexBuffer( commandBuffer, buffer, offset, indexType); + } + void vkCmdBindPipeline( VkCommandBuffer 
commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline ) const + { + return ::vkCmdBindPipeline( commandBuffer, pipelineBindPoint, pipeline); + } + void vkCmdBindShadingRateImageNV( VkCommandBuffer commandBuffer, VkImageView imageView, VkImageLayout imageLayout ) const + { + return ::vkCmdBindShadingRateImageNV( commandBuffer, imageView, imageLayout); + } + void vkCmdBindTransformFeedbackBuffersEXT( VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets, const VkDeviceSize* pSizes ) const + { + return ::vkCmdBindTransformFeedbackBuffersEXT( commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets, pSizes); + } + void vkCmdBindVertexBuffers( VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets ) const + { + return ::vkCmdBindVertexBuffers( commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets); + } + void vkCmdBlitImage( VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageBlit* pRegions, VkFilter filter ) const + { + return ::vkCmdBlitImage( commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions, filter); + } + void vkCmdBuildAccelerationStructureNV( VkCommandBuffer commandBuffer, const VkAccelerationStructureInfoNV* pInfo, VkBuffer instanceData, VkDeviceSize instanceOffset, VkBool32 update, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkBuffer scratch, VkDeviceSize scratchOffset ) const + { + return ::vkCmdBuildAccelerationStructureNV( commandBuffer, pInfo, instanceData, instanceOffset, update, dst, src, scratch, scratchOffset); + } + void vkCmdClearAttachments( VkCommandBuffer commandBuffer, uint32_t attachmentCount, const VkClearAttachment* pAttachments, uint32_t rectCount, const VkClearRect* pRects ) const + { + return ::vkCmdClearAttachments( commandBuffer, attachmentCount, pAttachments, rectCount, pRects); + } + void vkCmdClearColorImage( VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearColorValue* pColor, uint32_t rangeCount, const VkImageSubresourceRange* pRanges ) const + { + return ::vkCmdClearColorImage( commandBuffer, image, imageLayout, pColor, rangeCount, pRanges); + } + void vkCmdClearDepthStencilImage( VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const VkImageSubresourceRange* pRanges ) const + { + return ::vkCmdClearDepthStencilImage( commandBuffer, image, imageLayout, pDepthStencil, rangeCount, pRanges); + } + void vkCmdCopyAccelerationStructureNV( VkCommandBuffer commandBuffer, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkCopyAccelerationStructureModeNV mode ) const + { + return ::vkCmdCopyAccelerationStructureNV( commandBuffer, dst, src, mode); + } + void vkCmdCopyBuffer( VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferCopy* pRegions ) const + { + return ::vkCmdCopyBuffer( commandBuffer, srcBuffer, dstBuffer, regionCount, pRegions); + } + void vkCmdCopyBufferToImage( VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkBufferImageCopy* pRegions ) const + { + return ::vkCmdCopyBufferToImage( commandBuffer, srcBuffer, dstImage, dstImageLayout, 
regionCount, pRegions); + } + void vkCmdCopyImage( VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageCopy* pRegions ) const + { + return ::vkCmdCopyImage( commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions); + } + void vkCmdCopyImageToBuffer( VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferImageCopy* pRegions ) const + { + return ::vkCmdCopyImageToBuffer( commandBuffer, srcImage, srcImageLayout, dstBuffer, regionCount, pRegions); + } + void vkCmdCopyQueryPoolResults( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize stride, VkQueryResultFlags flags ) const + { + return ::vkCmdCopyQueryPoolResults( commandBuffer, queryPool, firstQuery, queryCount, dstBuffer, dstOffset, stride, flags); + } + void vkCmdDebugMarkerBeginEXT( VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT* pMarkerInfo ) const + { + return ::vkCmdDebugMarkerBeginEXT( commandBuffer, pMarkerInfo); + } + void vkCmdDebugMarkerEndEXT( VkCommandBuffer commandBuffer ) const + { + return ::vkCmdDebugMarkerEndEXT( commandBuffer); + } + void vkCmdDebugMarkerInsertEXT( VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT* pMarkerInfo ) const + { + return ::vkCmdDebugMarkerInsertEXT( commandBuffer, pMarkerInfo); + } + void vkCmdDispatch( VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const + { + return ::vkCmdDispatch( commandBuffer, groupCountX, groupCountY, groupCountZ); + } + void vkCmdDispatchBase( VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const + { + return ::vkCmdDispatchBase( commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ); + } + void vkCmdDispatchBaseKHR( VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const + { + return ::vkCmdDispatchBaseKHR( commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ); + } + void vkCmdDispatchIndirect( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset ) const + { + return ::vkCmdDispatchIndirect( commandBuffer, buffer, offset); + } + void vkCmdDraw( VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance ) const + { + return ::vkCmdDraw( commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance); + } + void vkCmdDrawIndexed( VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance ) const + { + return ::vkCmdDrawIndexed( commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance); + } + void vkCmdDrawIndexedIndirect( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride ) const + { + return ::vkCmdDrawIndexedIndirect( commandBuffer, buffer, offset, drawCount, stride); + } + void vkCmdDrawIndexedIndirectCountAMD( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, 
uint32_t maxDrawCount, uint32_t stride ) const + { + return ::vkCmdDrawIndexedIndirectCountAMD( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride); + } + void vkCmdDrawIndexedIndirectCountKHR( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const + { + return ::vkCmdDrawIndexedIndirectCountKHR( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride); + } + void vkCmdDrawIndirect( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride ) const + { + return ::vkCmdDrawIndirect( commandBuffer, buffer, offset, drawCount, stride); + } + void vkCmdDrawIndirectByteCountEXT( VkCommandBuffer commandBuffer, uint32_t instanceCount, uint32_t firstInstance, VkBuffer counterBuffer, VkDeviceSize counterBufferOffset, uint32_t counterOffset, uint32_t vertexStride ) const + { + return ::vkCmdDrawIndirectByteCountEXT( commandBuffer, instanceCount, firstInstance, counterBuffer, counterBufferOffset, counterOffset, vertexStride); + } + void vkCmdDrawIndirectCountAMD( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const + { + return ::vkCmdDrawIndirectCountAMD( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride); + } + void vkCmdDrawIndirectCountKHR( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const + { + return ::vkCmdDrawIndirectCountKHR( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride); + } + void vkCmdDrawMeshTasksIndirectCountNV( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const + { + return ::vkCmdDrawMeshTasksIndirectCountNV( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride); + } + void vkCmdDrawMeshTasksIndirectNV( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride ) const + { + return ::vkCmdDrawMeshTasksIndirectNV( commandBuffer, buffer, offset, drawCount, stride); + } + void vkCmdDrawMeshTasksNV( VkCommandBuffer commandBuffer, uint32_t taskCount, uint32_t firstTask ) const + { + return ::vkCmdDrawMeshTasksNV( commandBuffer, taskCount, firstTask); + } + void vkCmdEndConditionalRenderingEXT( VkCommandBuffer commandBuffer ) const + { + return ::vkCmdEndConditionalRenderingEXT( commandBuffer); + } + void vkCmdEndDebugUtilsLabelEXT( VkCommandBuffer commandBuffer ) const + { + return ::vkCmdEndDebugUtilsLabelEXT( commandBuffer); + } + void vkCmdEndQuery( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query ) const + { + return ::vkCmdEndQuery( commandBuffer, queryPool, query); + } + void vkCmdEndQueryIndexedEXT( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, uint32_t index ) const + { + return ::vkCmdEndQueryIndexedEXT( commandBuffer, queryPool, query, index); + } + void vkCmdEndRenderPass( VkCommandBuffer commandBuffer ) const + { + return ::vkCmdEndRenderPass( commandBuffer); + } + void vkCmdEndRenderPass2KHR( VkCommandBuffer commandBuffer, const VkSubpassEndInfoKHR* pSubpassEndInfo ) const + { + return ::vkCmdEndRenderPass2KHR( 
commandBuffer, pSubpassEndInfo); + } + void vkCmdEndTransformFeedbackEXT( VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VkBuffer* pCounterBuffers, const VkDeviceSize* pCounterBufferOffsets ) const + { + return ::vkCmdEndTransformFeedbackEXT( commandBuffer, firstCounterBuffer, counterBufferCount, pCounterBuffers, pCounterBufferOffsets); + } + void vkCmdExecuteCommands( VkCommandBuffer commandBuffer, uint32_t commandBufferCount, const VkCommandBuffer* pCommandBuffers ) const + { + return ::vkCmdExecuteCommands( commandBuffer, commandBufferCount, pCommandBuffers); + } + void vkCmdFillBuffer( VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize size, uint32_t data ) const + { + return ::vkCmdFillBuffer( commandBuffer, dstBuffer, dstOffset, size, data); + } + void vkCmdInsertDebugUtilsLabelEXT( VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT* pLabelInfo ) const + { + return ::vkCmdInsertDebugUtilsLabelEXT( commandBuffer, pLabelInfo); + } + void vkCmdNextSubpass( VkCommandBuffer commandBuffer, VkSubpassContents contents ) const + { + return ::vkCmdNextSubpass( commandBuffer, contents); + } + void vkCmdNextSubpass2KHR( VkCommandBuffer commandBuffer, const VkSubpassBeginInfoKHR* pSubpassBeginInfo, const VkSubpassEndInfoKHR* pSubpassEndInfo ) const + { + return ::vkCmdNextSubpass2KHR( commandBuffer, pSubpassBeginInfo, pSubpassEndInfo); + } + void vkCmdPipelineBarrier( VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers ) const + { + return ::vkCmdPipelineBarrier( commandBuffer, srcStageMask, dstStageMask, dependencyFlags, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers); + } + void vkCmdProcessCommandsNVX( VkCommandBuffer commandBuffer, const VkCmdProcessCommandsInfoNVX* pProcessCommandsInfo ) const + { + return ::vkCmdProcessCommandsNVX( commandBuffer, pProcessCommandsInfo); + } + void vkCmdPushConstants( VkCommandBuffer commandBuffer, VkPipelineLayout layout, VkShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void* pValues ) const + { + return ::vkCmdPushConstants( commandBuffer, layout, stageFlags, offset, size, pValues); + } + void vkCmdPushDescriptorSetKHR( VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites ) const + { + return ::vkCmdPushDescriptorSetKHR( commandBuffer, pipelineBindPoint, layout, set, descriptorWriteCount, pDescriptorWrites); + } + void vkCmdPushDescriptorSetWithTemplateKHR( VkCommandBuffer commandBuffer, VkDescriptorUpdateTemplate descriptorUpdateTemplate, VkPipelineLayout layout, uint32_t set, const void* pData ) const + { + return ::vkCmdPushDescriptorSetWithTemplateKHR( commandBuffer, descriptorUpdateTemplate, layout, set, pData); + } + void vkCmdReserveSpaceForCommandsNVX( VkCommandBuffer commandBuffer, const VkCmdReserveSpaceForCommandsInfoNVX* pReserveSpaceInfo ) const + { + return ::vkCmdReserveSpaceForCommandsNVX( commandBuffer, pReserveSpaceInfo); + } + void vkCmdResetEvent( VkCommandBuffer commandBuffer, VkEvent event, 
VkPipelineStageFlags stageMask ) const + { + return ::vkCmdResetEvent( commandBuffer, event, stageMask); + } + void vkCmdResetQueryPool( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const + { + return ::vkCmdResetQueryPool( commandBuffer, queryPool, firstQuery, queryCount); + } + void vkCmdResolveImage( VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageResolve* pRegions ) const + { + return ::vkCmdResolveImage( commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions); + } + void vkCmdSetBlendConstants( VkCommandBuffer commandBuffer, const float blendConstants[4] ) const + { + return ::vkCmdSetBlendConstants( commandBuffer, blendConstants); + } + void vkCmdSetCheckpointNV( VkCommandBuffer commandBuffer, const void* pCheckpointMarker ) const + { + return ::vkCmdSetCheckpointNV( commandBuffer, pCheckpointMarker); + } + void vkCmdSetCoarseSampleOrderNV( VkCommandBuffer commandBuffer, VkCoarseSampleOrderTypeNV sampleOrderType, uint32_t customSampleOrderCount, const VkCoarseSampleOrderCustomNV* pCustomSampleOrders ) const + { + return ::vkCmdSetCoarseSampleOrderNV( commandBuffer, sampleOrderType, customSampleOrderCount, pCustomSampleOrders); + } + void vkCmdSetDepthBias( VkCommandBuffer commandBuffer, float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor ) const + { + return ::vkCmdSetDepthBias( commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor); + } + void vkCmdSetDepthBounds( VkCommandBuffer commandBuffer, float minDepthBounds, float maxDepthBounds ) const + { + return ::vkCmdSetDepthBounds( commandBuffer, minDepthBounds, maxDepthBounds); + } + void vkCmdSetDeviceMask( VkCommandBuffer commandBuffer, uint32_t deviceMask ) const + { + return ::vkCmdSetDeviceMask( commandBuffer, deviceMask); + } + void vkCmdSetDeviceMaskKHR( VkCommandBuffer commandBuffer, uint32_t deviceMask ) const + { + return ::vkCmdSetDeviceMaskKHR( commandBuffer, deviceMask); + } + void vkCmdSetDiscardRectangleEXT( VkCommandBuffer commandBuffer, uint32_t firstDiscardRectangle, uint32_t discardRectangleCount, const VkRect2D* pDiscardRectangles ) const + { + return ::vkCmdSetDiscardRectangleEXT( commandBuffer, firstDiscardRectangle, discardRectangleCount, pDiscardRectangles); + } + void vkCmdSetEvent( VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask ) const + { + return ::vkCmdSetEvent( commandBuffer, event, stageMask); + } + void vkCmdSetExclusiveScissorNV( VkCommandBuffer commandBuffer, uint32_t firstExclusiveScissor, uint32_t exclusiveScissorCount, const VkRect2D* pExclusiveScissors ) const + { + return ::vkCmdSetExclusiveScissorNV( commandBuffer, firstExclusiveScissor, exclusiveScissorCount, pExclusiveScissors); + } + void vkCmdSetLineWidth( VkCommandBuffer commandBuffer, float lineWidth ) const + { + return ::vkCmdSetLineWidth( commandBuffer, lineWidth); + } + void vkCmdSetSampleLocationsEXT( VkCommandBuffer commandBuffer, const VkSampleLocationsInfoEXT* pSampleLocationsInfo ) const + { + return ::vkCmdSetSampleLocationsEXT( commandBuffer, pSampleLocationsInfo); + } + void vkCmdSetScissor( VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount, const VkRect2D* pScissors ) const + { + return ::vkCmdSetScissor( commandBuffer, firstScissor, scissorCount, pScissors); + } + void vkCmdSetStencilCompareMask( VkCommandBuffer 
commandBuffer, VkStencilFaceFlags faceMask, uint32_t compareMask ) const + { + return ::vkCmdSetStencilCompareMask( commandBuffer, faceMask, compareMask); + } + void vkCmdSetStencilReference( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t reference ) const + { + return ::vkCmdSetStencilReference( commandBuffer, faceMask, reference); + } + void vkCmdSetStencilWriteMask( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t writeMask ) const + { + return ::vkCmdSetStencilWriteMask( commandBuffer, faceMask, writeMask); + } + void vkCmdSetViewport( VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewport* pViewports ) const + { + return ::vkCmdSetViewport( commandBuffer, firstViewport, viewportCount, pViewports); + } + void vkCmdSetViewportShadingRatePaletteNV( VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkShadingRatePaletteNV* pShadingRatePalettes ) const + { + return ::vkCmdSetViewportShadingRatePaletteNV( commandBuffer, firstViewport, viewportCount, pShadingRatePalettes); + } + void vkCmdSetViewportWScalingNV( VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewportWScalingNV* pViewportWScalings ) const + { + return ::vkCmdSetViewportWScalingNV( commandBuffer, firstViewport, viewportCount, pViewportWScalings); + } + void vkCmdTraceRaysNV( VkCommandBuffer commandBuffer, VkBuffer raygenShaderBindingTableBuffer, VkDeviceSize raygenShaderBindingOffset, VkBuffer missShaderBindingTableBuffer, VkDeviceSize missShaderBindingOffset, VkDeviceSize missShaderBindingStride, VkBuffer hitShaderBindingTableBuffer, VkDeviceSize hitShaderBindingOffset, VkDeviceSize hitShaderBindingStride, VkBuffer callableShaderBindingTableBuffer, VkDeviceSize callableShaderBindingOffset, VkDeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth ) const + { + return ::vkCmdTraceRaysNV( commandBuffer, raygenShaderBindingTableBuffer, raygenShaderBindingOffset, missShaderBindingTableBuffer, missShaderBindingOffset, missShaderBindingStride, hitShaderBindingTableBuffer, hitShaderBindingOffset, hitShaderBindingStride, callableShaderBindingTableBuffer, callableShaderBindingOffset, callableShaderBindingStride, width, height, depth); + } + void vkCmdUpdateBuffer( VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize dataSize, const void* pData ) const + { + return ::vkCmdUpdateBuffer( commandBuffer, dstBuffer, dstOffset, dataSize, pData); + } + void vkCmdWaitEvents( VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent* pEvents, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers ) const + { + return ::vkCmdWaitEvents( commandBuffer, eventCount, pEvents, srcStageMask, dstStageMask, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers); + } + void vkCmdWriteAccelerationStructuresPropertiesNV( VkCommandBuffer commandBuffer, uint32_t accelerationStructureCount, const VkAccelerationStructureNV* pAccelerationStructures, VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery ) const + { + return ::vkCmdWriteAccelerationStructuresPropertiesNV( commandBuffer, 
accelerationStructureCount, pAccelerationStructures, queryType, queryPool, firstQuery); + } + void vkCmdWriteBufferMarkerAMD( VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker ) const + { + return ::vkCmdWriteBufferMarkerAMD( commandBuffer, pipelineStage, dstBuffer, dstOffset, marker); + } + void vkCmdWriteTimestamp( VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkQueryPool queryPool, uint32_t query ) const + { + return ::vkCmdWriteTimestamp( commandBuffer, pipelineStage, queryPool, query); + } + VkResult vkCompileDeferredNV( VkDevice device, VkPipeline pipeline, uint32_t shader ) const + { + return ::vkCompileDeferredNV( device, pipeline, shader); + } + VkResult vkCreateAccelerationStructureNV( VkDevice device, const VkAccelerationStructureCreateInfoNV* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkAccelerationStructureNV* pAccelerationStructure ) const + { + return ::vkCreateAccelerationStructureNV( device, pCreateInfo, pAllocator, pAccelerationStructure); + } +#ifdef VK_USE_PLATFORM_ANDROID_KHR + VkResult vkCreateAndroidSurfaceKHR( VkInstance instance, const VkAndroidSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const + { + return ::vkCreateAndroidSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface); + } +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + VkResult vkCreateBuffer( VkDevice device, const VkBufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBuffer* pBuffer ) const + { + return ::vkCreateBuffer( device, pCreateInfo, pAllocator, pBuffer); + } + VkResult vkCreateBufferView( VkDevice device, const VkBufferViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBufferView* pView ) const + { + return ::vkCreateBufferView( device, pCreateInfo, pAllocator, pView); + } + VkResult vkCreateCommandPool( VkDevice device, const VkCommandPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkCommandPool* pCommandPool ) const + { + return ::vkCreateCommandPool( device, pCreateInfo, pAllocator, pCommandPool); + } + VkResult vkCreateComputePipelines( VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkComputePipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines ) const + { + return ::vkCreateComputePipelines( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines); + } + VkResult vkCreateDebugReportCallbackEXT( VkInstance instance, const VkDebugReportCallbackCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDebugReportCallbackEXT* pCallback ) const + { + return ::vkCreateDebugReportCallbackEXT( instance, pCreateInfo, pAllocator, pCallback); + } + VkResult vkCreateDebugUtilsMessengerEXT( VkInstance instance, const VkDebugUtilsMessengerCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDebugUtilsMessengerEXT* pMessenger ) const + { + return ::vkCreateDebugUtilsMessengerEXT( instance, pCreateInfo, pAllocator, pMessenger); + } + VkResult vkCreateDescriptorPool( VkDevice device, const VkDescriptorPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorPool* pDescriptorPool ) const + { + return ::vkCreateDescriptorPool( device, pCreateInfo, pAllocator, pDescriptorPool); + } + VkResult vkCreateDescriptorSetLayout( VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, 
VkDescriptorSetLayout* pSetLayout ) const + { + return ::vkCreateDescriptorSetLayout( device, pCreateInfo, pAllocator, pSetLayout); + } + VkResult vkCreateDescriptorUpdateTemplate( VkDevice device, const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate ) const + { + return ::vkCreateDescriptorUpdateTemplate( device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate); + } + VkResult vkCreateDescriptorUpdateTemplateKHR( VkDevice device, const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate ) const + { + return ::vkCreateDescriptorUpdateTemplateKHR( device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate); + } + VkResult vkCreateDevice( VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDevice* pDevice ) const + { + return ::vkCreateDevice( physicalDevice, pCreateInfo, pAllocator, pDevice); + } + VkResult vkCreateDisplayModeKHR( VkPhysicalDevice physicalDevice, VkDisplayKHR display, const VkDisplayModeCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDisplayModeKHR* pMode ) const + { + return ::vkCreateDisplayModeKHR( physicalDevice, display, pCreateInfo, pAllocator, pMode); + } + VkResult vkCreateDisplayPlaneSurfaceKHR( VkInstance instance, const VkDisplaySurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const + { + return ::vkCreateDisplayPlaneSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface); + } + VkResult vkCreateEvent( VkDevice device, const VkEventCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkEvent* pEvent ) const + { + return ::vkCreateEvent( device, pCreateInfo, pAllocator, pEvent); + } + VkResult vkCreateFence( VkDevice device, const VkFenceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence ) const + { + return ::vkCreateFence( device, pCreateInfo, pAllocator, pFence); + } + VkResult vkCreateFramebuffer( VkDevice device, const VkFramebufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkFramebuffer* pFramebuffer ) const + { + return ::vkCreateFramebuffer( device, pCreateInfo, pAllocator, pFramebuffer); + } + VkResult vkCreateGraphicsPipelines( VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkGraphicsPipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines ) const + { + return ::vkCreateGraphicsPipelines( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines); + } +#ifdef VK_USE_PLATFORM_IOS_MVK + VkResult vkCreateIOSSurfaceMVK( VkInstance instance, const VkIOSSurfaceCreateInfoMVK* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const + { + return ::vkCreateIOSSurfaceMVK( instance, pCreateInfo, pAllocator, pSurface); + } +#endif /*VK_USE_PLATFORM_IOS_MVK*/ + VkResult vkCreateImage( VkDevice device, const VkImageCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkImage* pImage ) const + { + return ::vkCreateImage( device, pCreateInfo, pAllocator, pImage); + } +#ifdef VK_USE_PLATFORM_FUCHSIA_FUCHSIA + VkResult vkCreateImagePipeSurfaceFUCHSIA( VkInstance instance, const VkImagePipeSurfaceCreateInfoFUCHSIA* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const + { + return ::vkCreateImagePipeSurfaceFUCHSIA( 
instance, pCreateInfo, pAllocator, pSurface); + } +#endif /*VK_USE_PLATFORM_FUCHSIA_FUCHSIA*/ + VkResult vkCreateImageView( VkDevice device, const VkImageViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkImageView* pView ) const + { + return ::vkCreateImageView( device, pCreateInfo, pAllocator, pView); + } + VkResult vkCreateIndirectCommandsLayoutNVX( VkDevice device, const VkIndirectCommandsLayoutCreateInfoNVX* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkIndirectCommandsLayoutNVX* pIndirectCommandsLayout ) const + { + return ::vkCreateIndirectCommandsLayoutNVX( device, pCreateInfo, pAllocator, pIndirectCommandsLayout); + } + VkResult vkCreateInstance( const VkInstanceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkInstance* pInstance ) const + { + return ::vkCreateInstance( pCreateInfo, pAllocator, pInstance); + } +#ifdef VK_USE_PLATFORM_MACOS_MVK + VkResult vkCreateMacOSSurfaceMVK( VkInstance instance, const VkMacOSSurfaceCreateInfoMVK* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const + { + return ::vkCreateMacOSSurfaceMVK( instance, pCreateInfo, pAllocator, pSurface); + } +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ + VkResult vkCreateObjectTableNVX( VkDevice device, const VkObjectTableCreateInfoNVX* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkObjectTableNVX* pObjectTable ) const + { + return ::vkCreateObjectTableNVX( device, pCreateInfo, pAllocator, pObjectTable); + } + VkResult vkCreatePipelineCache( VkDevice device, const VkPipelineCacheCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPipelineCache* pPipelineCache ) const + { + return ::vkCreatePipelineCache( device, pCreateInfo, pAllocator, pPipelineCache); + } + VkResult vkCreatePipelineLayout( VkDevice device, const VkPipelineLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPipelineLayout* pPipelineLayout ) const + { + return ::vkCreatePipelineLayout( device, pCreateInfo, pAllocator, pPipelineLayout); + } + VkResult vkCreateQueryPool( VkDevice device, const VkQueryPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkQueryPool* pQueryPool ) const + { + return ::vkCreateQueryPool( device, pCreateInfo, pAllocator, pQueryPool); + } + VkResult vkCreateRayTracingPipelinesNV( VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkRayTracingPipelineCreateInfoNV* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines ) const + { + return ::vkCreateRayTracingPipelinesNV( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines); + } + VkResult vkCreateRenderPass( VkDevice device, const VkRenderPassCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass ) const + { + return ::vkCreateRenderPass( device, pCreateInfo, pAllocator, pRenderPass); + } + VkResult vkCreateRenderPass2KHR( VkDevice device, const VkRenderPassCreateInfo2KHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass ) const + { + return ::vkCreateRenderPass2KHR( device, pCreateInfo, pAllocator, pRenderPass); + } + VkResult vkCreateSampler( VkDevice device, const VkSamplerCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSampler* pSampler ) const + { + return ::vkCreateSampler( device, pCreateInfo, pAllocator, pSampler); + } + VkResult vkCreateSamplerYcbcrConversion( VkDevice device, const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, const VkAllocationCallbacks* 
pAllocator, VkSamplerYcbcrConversion* pYcbcrConversion ) const + { + return ::vkCreateSamplerYcbcrConversion( device, pCreateInfo, pAllocator, pYcbcrConversion); + } + VkResult vkCreateSamplerYcbcrConversionKHR( VkDevice device, const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSamplerYcbcrConversion* pYcbcrConversion ) const + { + return ::vkCreateSamplerYcbcrConversionKHR( device, pCreateInfo, pAllocator, pYcbcrConversion); + } + VkResult vkCreateSemaphore( VkDevice device, const VkSemaphoreCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSemaphore* pSemaphore ) const + { + return ::vkCreateSemaphore( device, pCreateInfo, pAllocator, pSemaphore); + } + VkResult vkCreateShaderModule( VkDevice device, const VkShaderModuleCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkShaderModule* pShaderModule ) const + { + return ::vkCreateShaderModule( device, pCreateInfo, pAllocator, pShaderModule); + } + VkResult vkCreateSharedSwapchainsKHR( VkDevice device, uint32_t swapchainCount, const VkSwapchainCreateInfoKHR* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchains ) const + { + return ::vkCreateSharedSwapchainsKHR( device, swapchainCount, pCreateInfos, pAllocator, pSwapchains); + } + VkResult vkCreateSwapchainKHR( VkDevice device, const VkSwapchainCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchain ) const + { + return ::vkCreateSwapchainKHR( device, pCreateInfo, pAllocator, pSwapchain); + } + VkResult vkCreateValidationCacheEXT( VkDevice device, const VkValidationCacheCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkValidationCacheEXT* pValidationCache ) const + { + return ::vkCreateValidationCacheEXT( device, pCreateInfo, pAllocator, pValidationCache); + } +#ifdef VK_USE_PLATFORM_VI_NN + VkResult vkCreateViSurfaceNN( VkInstance instance, const VkViSurfaceCreateInfoNN* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const + { + return ::vkCreateViSurfaceNN( instance, pCreateInfo, pAllocator, pSurface); + } +#endif /*VK_USE_PLATFORM_VI_NN*/ +#ifdef VK_USE_PLATFORM_WAYLAND_KHR + VkResult vkCreateWaylandSurfaceKHR( VkInstance instance, const VkWaylandSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const + { + return ::vkCreateWaylandSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface); + } +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ +#ifdef VK_USE_PLATFORM_WIN32_KHR + VkResult vkCreateWin32SurfaceKHR( VkInstance instance, const VkWin32SurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const + { + return ::vkCreateWin32SurfaceKHR( instance, pCreateInfo, pAllocator, pSurface); + } +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +#ifdef VK_USE_PLATFORM_XCB_KHR + VkResult vkCreateXcbSurfaceKHR( VkInstance instance, const VkXcbSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const + { + return ::vkCreateXcbSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface); + } +#endif /*VK_USE_PLATFORM_XCB_KHR*/ +#ifdef VK_USE_PLATFORM_XLIB_KHR + VkResult vkCreateXlibSurfaceKHR( VkInstance instance, const VkXlibSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const + { + return ::vkCreateXlibSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface); + } +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ + VkResult 
vkDebugMarkerSetObjectNameEXT( VkDevice device, const VkDebugMarkerObjectNameInfoEXT* pNameInfo ) const + { + return ::vkDebugMarkerSetObjectNameEXT( device, pNameInfo); + } + VkResult vkDebugMarkerSetObjectTagEXT( VkDevice device, const VkDebugMarkerObjectTagInfoEXT* pTagInfo ) const + { + return ::vkDebugMarkerSetObjectTagEXT( device, pTagInfo); + } + void vkDebugReportMessageEXT( VkInstance instance, VkDebugReportFlagsEXT flags, VkDebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char* pLayerPrefix, const char* pMessage ) const + { + return ::vkDebugReportMessageEXT( instance, flags, objectType, object, location, messageCode, pLayerPrefix, pMessage); + } + void vkDestroyAccelerationStructureNV( VkDevice device, VkAccelerationStructureNV accelerationStructure, const VkAllocationCallbacks* pAllocator ) const + { + return ::vkDestroyAccelerationStructureNV( device, accelerationStructure, pAllocator); + } + void vkDestroyBuffer( VkDevice device, VkBuffer buffer, const VkAllocationCallbacks* pAllocator ) const + { + return ::vkDestroyBuffer( device, buffer, pAllocator); + } + void vkDestroyBufferView( VkDevice device, VkBufferView bufferView, const VkAllocationCallbacks* pAllocator ) const + { + return ::vkDestroyBufferView( device, bufferView, pAllocator); + } + void vkDestroyCommandPool( VkDevice device, VkCommandPool commandPool, const VkAllocationCallbacks* pAllocator ) const + { + return ::vkDestroyCommandPool( device, commandPool, pAllocator); + } + void vkDestroyDebugReportCallbackEXT( VkInstance instance, VkDebugReportCallbackEXT callback, const VkAllocationCallbacks* pAllocator ) const + { + return ::vkDestroyDebugReportCallbackEXT( instance, callback, pAllocator); + } + void vkDestroyDebugUtilsMessengerEXT( VkInstance instance, VkDebugUtilsMessengerEXT messenger, const VkAllocationCallbacks* pAllocator ) const + { + return ::vkDestroyDebugUtilsMessengerEXT( instance, messenger, pAllocator); + } + void vkDestroyDescriptorPool( VkDevice device, VkDescriptorPool descriptorPool, const VkAllocationCallbacks* pAllocator ) const + { + return ::vkDestroyDescriptorPool( device, descriptorPool, pAllocator); + } + void vkDestroyDescriptorSetLayout( VkDevice device, VkDescriptorSetLayout descriptorSetLayout, const VkAllocationCallbacks* pAllocator ) const + { + return ::vkDestroyDescriptorSetLayout( device, descriptorSetLayout, pAllocator); + } + void vkDestroyDescriptorUpdateTemplate( VkDevice device, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const VkAllocationCallbacks* pAllocator ) const + { + return ::vkDestroyDescriptorUpdateTemplate( device, descriptorUpdateTemplate, pAllocator); + } + void vkDestroyDescriptorUpdateTemplateKHR( VkDevice device, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const VkAllocationCallbacks* pAllocator ) const + { + return ::vkDestroyDescriptorUpdateTemplateKHR( device, descriptorUpdateTemplate, pAllocator); + } + void vkDestroyDevice( VkDevice device, const VkAllocationCallbacks* pAllocator ) const + { + return ::vkDestroyDevice( device, pAllocator); + } + void vkDestroyEvent( VkDevice device, VkEvent event, const VkAllocationCallbacks* pAllocator ) const + { + return ::vkDestroyEvent( device, event, pAllocator); + } + void vkDestroyFence( VkDevice device, VkFence fence, const VkAllocationCallbacks* pAllocator ) const + { + return ::vkDestroyFence( device, fence, pAllocator); + } + void vkDestroyFramebuffer( VkDevice device, VkFramebuffer framebuffer, const VkAllocationCallbacks* pAllocator ) 
const + { + return ::vkDestroyFramebuffer( device, framebuffer, pAllocator); + } + void vkDestroyImage( VkDevice device, VkImage image, const VkAllocationCallbacks* pAllocator ) const + { + return ::vkDestroyImage( device, image, pAllocator); + } + void vkDestroyImageView( VkDevice device, VkImageView imageView, const VkAllocationCallbacks* pAllocator ) const + { + return ::vkDestroyImageView( device, imageView, pAllocator); + } + void vkDestroyIndirectCommandsLayoutNVX( VkDevice device, VkIndirectCommandsLayoutNVX indirectCommandsLayout, const VkAllocationCallbacks* pAllocator ) const + { + return ::vkDestroyIndirectCommandsLayoutNVX( device, indirectCommandsLayout, pAllocator); + } + void vkDestroyInstance( VkInstance instance, const VkAllocationCallbacks* pAllocator ) const + { + return ::vkDestroyInstance( instance, pAllocator); + } + void vkDestroyObjectTableNVX( VkDevice device, VkObjectTableNVX objectTable, const VkAllocationCallbacks* pAllocator ) const + { + return ::vkDestroyObjectTableNVX( device, objectTable, pAllocator); + } + void vkDestroyPipeline( VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks* pAllocator ) const + { + return ::vkDestroyPipeline( device, pipeline, pAllocator); + } + void vkDestroyPipelineCache( VkDevice device, VkPipelineCache pipelineCache, const VkAllocationCallbacks* pAllocator ) const + { + return ::vkDestroyPipelineCache( device, pipelineCache, pAllocator); + } + void vkDestroyPipelineLayout( VkDevice device, VkPipelineLayout pipelineLayout, const VkAllocationCallbacks* pAllocator ) const + { + return ::vkDestroyPipelineLayout( device, pipelineLayout, pAllocator); + } + void vkDestroyQueryPool( VkDevice device, VkQueryPool queryPool, const VkAllocationCallbacks* pAllocator ) const + { + return ::vkDestroyQueryPool( device, queryPool, pAllocator); + } + void vkDestroyRenderPass( VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks* pAllocator ) const + { + return ::vkDestroyRenderPass( device, renderPass, pAllocator); + } + void vkDestroySampler( VkDevice device, VkSampler sampler, const VkAllocationCallbacks* pAllocator ) const + { + return ::vkDestroySampler( device, sampler, pAllocator); + } + void vkDestroySamplerYcbcrConversion( VkDevice device, VkSamplerYcbcrConversion ycbcrConversion, const VkAllocationCallbacks* pAllocator ) const + { + return ::vkDestroySamplerYcbcrConversion( device, ycbcrConversion, pAllocator); + } + void vkDestroySamplerYcbcrConversionKHR( VkDevice device, VkSamplerYcbcrConversion ycbcrConversion, const VkAllocationCallbacks* pAllocator ) const + { + return ::vkDestroySamplerYcbcrConversionKHR( device, ycbcrConversion, pAllocator); + } + void vkDestroySemaphore( VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks* pAllocator ) const + { + return ::vkDestroySemaphore( device, semaphore, pAllocator); + } + void vkDestroyShaderModule( VkDevice device, VkShaderModule shaderModule, const VkAllocationCallbacks* pAllocator ) const + { + return ::vkDestroyShaderModule( device, shaderModule, pAllocator); + } + void vkDestroySurfaceKHR( VkInstance instance, VkSurfaceKHR surface, const VkAllocationCallbacks* pAllocator ) const + { + return ::vkDestroySurfaceKHR( instance, surface, pAllocator); + } + void vkDestroySwapchainKHR( VkDevice device, VkSwapchainKHR swapchain, const VkAllocationCallbacks* pAllocator ) const + { + return ::vkDestroySwapchainKHR( device, swapchain, pAllocator); + } + void vkDestroyValidationCacheEXT( VkDevice device, VkValidationCacheEXT validationCache, const 
VkAllocationCallbacks* pAllocator ) const + { + return ::vkDestroyValidationCacheEXT( device, validationCache, pAllocator); + } + VkResult vkDeviceWaitIdle( VkDevice device ) const + { + return ::vkDeviceWaitIdle( device); + } + VkResult vkDisplayPowerControlEXT( VkDevice device, VkDisplayKHR display, const VkDisplayPowerInfoEXT* pDisplayPowerInfo ) const + { + return ::vkDisplayPowerControlEXT( device, display, pDisplayPowerInfo); + } + VkResult vkEndCommandBuffer( VkCommandBuffer commandBuffer ) const + { + return ::vkEndCommandBuffer( commandBuffer); + } + VkResult vkEnumerateDeviceExtensionProperties( VkPhysicalDevice physicalDevice, const char* pLayerName, uint32_t* pPropertyCount, VkExtensionProperties* pProperties ) const + { + return ::vkEnumerateDeviceExtensionProperties( physicalDevice, pLayerName, pPropertyCount, pProperties); + } + VkResult vkEnumerateDeviceLayerProperties( VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkLayerProperties* pProperties ) const + { + return ::vkEnumerateDeviceLayerProperties( physicalDevice, pPropertyCount, pProperties); + } + VkResult vkEnumerateInstanceExtensionProperties( const char* pLayerName, uint32_t* pPropertyCount, VkExtensionProperties* pProperties ) const + { + return ::vkEnumerateInstanceExtensionProperties( pLayerName, pPropertyCount, pProperties); + } + VkResult vkEnumerateInstanceLayerProperties( uint32_t* pPropertyCount, VkLayerProperties* pProperties ) const + { + return ::vkEnumerateInstanceLayerProperties( pPropertyCount, pProperties); + } + VkResult vkEnumerateInstanceVersion( uint32_t* pApiVersion ) const + { + return ::vkEnumerateInstanceVersion( pApiVersion); + } + VkResult vkEnumeratePhysicalDeviceGroups( VkInstance instance, uint32_t* pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties ) const + { + return ::vkEnumeratePhysicalDeviceGroups( instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties); + } + VkResult vkEnumeratePhysicalDeviceGroupsKHR( VkInstance instance, uint32_t* pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties ) const + { + return ::vkEnumeratePhysicalDeviceGroupsKHR( instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties); + } + VkResult vkEnumeratePhysicalDevices( VkInstance instance, uint32_t* pPhysicalDeviceCount, VkPhysicalDevice* pPhysicalDevices ) const + { + return ::vkEnumeratePhysicalDevices( instance, pPhysicalDeviceCount, pPhysicalDevices); + } + VkResult vkFlushMappedMemoryRanges( VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange* pMemoryRanges ) const + { + return ::vkFlushMappedMemoryRanges( device, memoryRangeCount, pMemoryRanges); + } + void vkFreeCommandBuffers( VkDevice device, VkCommandPool commandPool, uint32_t commandBufferCount, const VkCommandBuffer* pCommandBuffers ) const + { + return ::vkFreeCommandBuffers( device, commandPool, commandBufferCount, pCommandBuffers); + } + VkResult vkFreeDescriptorSets( VkDevice device, VkDescriptorPool descriptorPool, uint32_t descriptorSetCount, const VkDescriptorSet* pDescriptorSets ) const + { + return ::vkFreeDescriptorSets( device, descriptorPool, descriptorSetCount, pDescriptorSets); + } + void vkFreeMemory( VkDevice device, VkDeviceMemory memory, const VkAllocationCallbacks* pAllocator ) const + { + return ::vkFreeMemory( device, memory, pAllocator); + } + VkResult vkGetAccelerationStructureHandleNV( VkDevice device, VkAccelerationStructureNV accelerationStructure, size_t dataSize, void* pData ) const + { 
+ return ::vkGetAccelerationStructureHandleNV( device, accelerationStructure, dataSize, pData); + } + void vkGetAccelerationStructureMemoryRequirementsNV( VkDevice device, const VkAccelerationStructureMemoryRequirementsInfoNV* pInfo, VkMemoryRequirements2KHR* pMemoryRequirements ) const + { + return ::vkGetAccelerationStructureMemoryRequirementsNV( device, pInfo, pMemoryRequirements); + } +#ifdef VK_USE_PLATFORM_ANDROID_ANDROID + VkResult vkGetAndroidHardwareBufferPropertiesANDROID( VkDevice device, const struct AHardwareBuffer* buffer, VkAndroidHardwareBufferPropertiesANDROID* pProperties ) const + { + return ::vkGetAndroidHardwareBufferPropertiesANDROID( device, buffer, pProperties); + } +#endif /*VK_USE_PLATFORM_ANDROID_ANDROID*/ + void vkGetBufferMemoryRequirements( VkDevice device, VkBuffer buffer, VkMemoryRequirements* pMemoryRequirements ) const + { + return ::vkGetBufferMemoryRequirements( device, buffer, pMemoryRequirements); + } + void vkGetBufferMemoryRequirements2( VkDevice device, const VkBufferMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements ) const + { + return ::vkGetBufferMemoryRequirements2( device, pInfo, pMemoryRequirements); + } + void vkGetBufferMemoryRequirements2KHR( VkDevice device, const VkBufferMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements ) const + { + return ::vkGetBufferMemoryRequirements2KHR( device, pInfo, pMemoryRequirements); + } + VkResult vkGetCalibratedTimestampsEXT( VkDevice device, uint32_t timestampCount, const VkCalibratedTimestampInfoEXT* pTimestampInfos, uint64_t* pTimestamps, uint64_t* pMaxDeviation ) const + { + return ::vkGetCalibratedTimestampsEXT( device, timestampCount, pTimestampInfos, pTimestamps, pMaxDeviation); + } + void vkGetDescriptorSetLayoutSupport( VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, VkDescriptorSetLayoutSupport* pSupport ) const + { + return ::vkGetDescriptorSetLayoutSupport( device, pCreateInfo, pSupport); + } + void vkGetDescriptorSetLayoutSupportKHR( VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, VkDescriptorSetLayoutSupport* pSupport ) const + { + return ::vkGetDescriptorSetLayoutSupportKHR( device, pCreateInfo, pSupport); + } + void vkGetDeviceGroupPeerMemoryFeatures( VkDevice device, uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VkPeerMemoryFeatureFlags* pPeerMemoryFeatures ) const + { + return ::vkGetDeviceGroupPeerMemoryFeatures( device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures); + } + void vkGetDeviceGroupPeerMemoryFeaturesKHR( VkDevice device, uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VkPeerMemoryFeatureFlags* pPeerMemoryFeatures ) const + { + return ::vkGetDeviceGroupPeerMemoryFeaturesKHR( device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures); + } + VkResult vkGetDeviceGroupPresentCapabilitiesKHR( VkDevice device, VkDeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities ) const + { + return ::vkGetDeviceGroupPresentCapabilitiesKHR( device, pDeviceGroupPresentCapabilities); + } + VkResult vkGetDeviceGroupSurfacePresentModesKHR( VkDevice device, VkSurfaceKHR surface, VkDeviceGroupPresentModeFlagsKHR* pModes ) const + { + return ::vkGetDeviceGroupSurfacePresentModesKHR( device, surface, pModes); + } + void vkGetDeviceMemoryCommitment( VkDevice device, VkDeviceMemory memory, VkDeviceSize* pCommittedMemoryInBytes ) const + { + return ::vkGetDeviceMemoryCommitment( device, memory, 
pCommittedMemoryInBytes); + } + PFN_vkVoidFunction vkGetDeviceProcAddr( VkDevice device, const char* pName ) const + { + return ::vkGetDeviceProcAddr( device, pName); + } + void vkGetDeviceQueue( VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex, VkQueue* pQueue ) const + { + return ::vkGetDeviceQueue( device, queueFamilyIndex, queueIndex, pQueue); + } + void vkGetDeviceQueue2( VkDevice device, const VkDeviceQueueInfo2* pQueueInfo, VkQueue* pQueue ) const + { + return ::vkGetDeviceQueue2( device, pQueueInfo, pQueue); + } + VkResult vkGetDisplayModeProperties2KHR( VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t* pPropertyCount, VkDisplayModeProperties2KHR* pProperties ) const + { + return ::vkGetDisplayModeProperties2KHR( physicalDevice, display, pPropertyCount, pProperties); + } + VkResult vkGetDisplayModePropertiesKHR( VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t* pPropertyCount, VkDisplayModePropertiesKHR* pProperties ) const + { + return ::vkGetDisplayModePropertiesKHR( physicalDevice, display, pPropertyCount, pProperties); + } + VkResult vkGetDisplayPlaneCapabilities2KHR( VkPhysicalDevice physicalDevice, const VkDisplayPlaneInfo2KHR* pDisplayPlaneInfo, VkDisplayPlaneCapabilities2KHR* pCapabilities ) const + { + return ::vkGetDisplayPlaneCapabilities2KHR( physicalDevice, pDisplayPlaneInfo, pCapabilities); + } + VkResult vkGetDisplayPlaneCapabilitiesKHR( VkPhysicalDevice physicalDevice, VkDisplayModeKHR mode, uint32_t planeIndex, VkDisplayPlaneCapabilitiesKHR* pCapabilities ) const + { + return ::vkGetDisplayPlaneCapabilitiesKHR( physicalDevice, mode, planeIndex, pCapabilities); + } + VkResult vkGetDisplayPlaneSupportedDisplaysKHR( VkPhysicalDevice physicalDevice, uint32_t planeIndex, uint32_t* pDisplayCount, VkDisplayKHR* pDisplays ) const + { + return ::vkGetDisplayPlaneSupportedDisplaysKHR( physicalDevice, planeIndex, pDisplayCount, pDisplays); + } + VkResult vkGetEventStatus( VkDevice device, VkEvent event ) const + { + return ::vkGetEventStatus( device, event); + } + VkResult vkGetFenceFdKHR( VkDevice device, const VkFenceGetFdInfoKHR* pGetFdInfo, int* pFd ) const + { + return ::vkGetFenceFdKHR( device, pGetFdInfo, pFd); + } + VkResult vkGetFenceStatus( VkDevice device, VkFence fence ) const + { + return ::vkGetFenceStatus( device, fence); + } +#ifdef VK_USE_PLATFORM_WIN32_KHR + VkResult vkGetFenceWin32HandleKHR( VkDevice device, const VkFenceGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle ) const + { + return ::vkGetFenceWin32HandleKHR( device, pGetWin32HandleInfo, pHandle); + } +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + VkResult vkGetImageDrmFormatModifierPropertiesEXT( VkDevice device, VkImage image, VkImageDrmFormatModifierPropertiesEXT* pProperties ) const + { + return ::vkGetImageDrmFormatModifierPropertiesEXT( device, image, pProperties); + } + void vkGetImageMemoryRequirements( VkDevice device, VkImage image, VkMemoryRequirements* pMemoryRequirements ) const + { + return ::vkGetImageMemoryRequirements( device, image, pMemoryRequirements); + } + void vkGetImageMemoryRequirements2( VkDevice device, const VkImageMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements ) const + { + return ::vkGetImageMemoryRequirements2( device, pInfo, pMemoryRequirements); + } + void vkGetImageMemoryRequirements2KHR( VkDevice device, const VkImageMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements ) const + { + return ::vkGetImageMemoryRequirements2KHR( device, pInfo, pMemoryRequirements); + } + 
void vkGetImageSparseMemoryRequirements( VkDevice device, VkImage image, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements* pSparseMemoryRequirements ) const + { + return ::vkGetImageSparseMemoryRequirements( device, image, pSparseMemoryRequirementCount, pSparseMemoryRequirements); + } + void vkGetImageSparseMemoryRequirements2( VkDevice device, const VkImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2* pSparseMemoryRequirements ) const + { + return ::vkGetImageSparseMemoryRequirements2( device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements); + } + void vkGetImageSparseMemoryRequirements2KHR( VkDevice device, const VkImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2* pSparseMemoryRequirements ) const + { + return ::vkGetImageSparseMemoryRequirements2KHR( device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements); + } + void vkGetImageSubresourceLayout( VkDevice device, VkImage image, const VkImageSubresource* pSubresource, VkSubresourceLayout* pLayout ) const + { + return ::vkGetImageSubresourceLayout( device, image, pSubresource, pLayout); + } + PFN_vkVoidFunction vkGetInstanceProcAddr( VkInstance instance, const char* pName ) const + { + return ::vkGetInstanceProcAddr( instance, pName); + } +#ifdef VK_USE_PLATFORM_ANDROID_ANDROID + VkResult vkGetMemoryAndroidHardwareBufferANDROID( VkDevice device, const VkMemoryGetAndroidHardwareBufferInfoANDROID* pInfo, struct AHardwareBuffer** pBuffer ) const + { + return ::vkGetMemoryAndroidHardwareBufferANDROID( device, pInfo, pBuffer); + } +#endif /*VK_USE_PLATFORM_ANDROID_ANDROID*/ + VkResult vkGetMemoryFdKHR( VkDevice device, const VkMemoryGetFdInfoKHR* pGetFdInfo, int* pFd ) const + { + return ::vkGetMemoryFdKHR( device, pGetFdInfo, pFd); + } + VkResult vkGetMemoryFdPropertiesKHR( VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, int fd, VkMemoryFdPropertiesKHR* pMemoryFdProperties ) const + { + return ::vkGetMemoryFdPropertiesKHR( device, handleType, fd, pMemoryFdProperties); + } + VkResult vkGetMemoryHostPointerPropertiesEXT( VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, VkMemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties ) const + { + return ::vkGetMemoryHostPointerPropertiesEXT( device, handleType, pHostPointer, pMemoryHostPointerProperties); + } +#ifdef VK_USE_PLATFORM_WIN32_KHR + VkResult vkGetMemoryWin32HandleKHR( VkDevice device, const VkMemoryGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle ) const + { + return ::vkGetMemoryWin32HandleKHR( device, pGetWin32HandleInfo, pHandle); + } +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +#ifdef VK_USE_PLATFORM_WIN32_NV + VkResult vkGetMemoryWin32HandleNV( VkDevice device, VkDeviceMemory memory, VkExternalMemoryHandleTypeFlagsNV handleType, HANDLE* pHandle ) const + { + return ::vkGetMemoryWin32HandleNV( device, memory, handleType, pHandle); + } +#endif /*VK_USE_PLATFORM_WIN32_NV*/ +#ifdef VK_USE_PLATFORM_WIN32_KHR + VkResult vkGetMemoryWin32HandlePropertiesKHR( VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, VkMemoryWin32HandlePropertiesKHR* pMemoryWin32HandleProperties ) const + { + return ::vkGetMemoryWin32HandlePropertiesKHR( device, handleType, handle, pMemoryWin32HandleProperties); + } +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + VkResult vkGetPastPresentationTimingGOOGLE( VkDevice device, VkSwapchainKHR 
swapchain, uint32_t* pPresentationTimingCount, VkPastPresentationTimingGOOGLE* pPresentationTimings ) const + { + return ::vkGetPastPresentationTimingGOOGLE( device, swapchain, pPresentationTimingCount, pPresentationTimings); + } + VkResult vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( VkPhysicalDevice physicalDevice, uint32_t* pTimeDomainCount, VkTimeDomainEXT* pTimeDomains ) const + { + return ::vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( physicalDevice, pTimeDomainCount, pTimeDomains); + } + VkResult vkGetPhysicalDeviceDisplayPlaneProperties2KHR( VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPlaneProperties2KHR* pProperties ) const + { + return ::vkGetPhysicalDeviceDisplayPlaneProperties2KHR( physicalDevice, pPropertyCount, pProperties); + } + VkResult vkGetPhysicalDeviceDisplayPlanePropertiesKHR( VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPlanePropertiesKHR* pProperties ) const + { + return ::vkGetPhysicalDeviceDisplayPlanePropertiesKHR( physicalDevice, pPropertyCount, pProperties); + } + VkResult vkGetPhysicalDeviceDisplayProperties2KHR( VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayProperties2KHR* pProperties ) const + { + return ::vkGetPhysicalDeviceDisplayProperties2KHR( physicalDevice, pPropertyCount, pProperties); + } + VkResult vkGetPhysicalDeviceDisplayPropertiesKHR( VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPropertiesKHR* pProperties ) const + { + return ::vkGetPhysicalDeviceDisplayPropertiesKHR( physicalDevice, pPropertyCount, pProperties); + } + void vkGetPhysicalDeviceExternalBufferProperties( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VkExternalBufferProperties* pExternalBufferProperties ) const + { + return ::vkGetPhysicalDeviceExternalBufferProperties( physicalDevice, pExternalBufferInfo, pExternalBufferProperties); + } + void vkGetPhysicalDeviceExternalBufferPropertiesKHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VkExternalBufferProperties* pExternalBufferProperties ) const + { + return ::vkGetPhysicalDeviceExternalBufferPropertiesKHR( physicalDevice, pExternalBufferInfo, pExternalBufferProperties); + } + void vkGetPhysicalDeviceExternalFenceProperties( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VkExternalFenceProperties* pExternalFenceProperties ) const + { + return ::vkGetPhysicalDeviceExternalFenceProperties( physicalDevice, pExternalFenceInfo, pExternalFenceProperties); + } + void vkGetPhysicalDeviceExternalFencePropertiesKHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VkExternalFenceProperties* pExternalFenceProperties ) const + { + return ::vkGetPhysicalDeviceExternalFencePropertiesKHR( physicalDevice, pExternalFenceInfo, pExternalFenceProperties); + } + VkResult vkGetPhysicalDeviceExternalImageFormatPropertiesNV( VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkExternalMemoryHandleTypeFlagsNV externalHandleType, VkExternalImageFormatPropertiesNV* pExternalImageFormatProperties ) const + { + return ::vkGetPhysicalDeviceExternalImageFormatPropertiesNV( physicalDevice, format, type, tiling, usage, flags, externalHandleType, pExternalImageFormatProperties); + } + void vkGetPhysicalDeviceExternalSemaphoreProperties( VkPhysicalDevice physicalDevice, const 
VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VkExternalSemaphoreProperties* pExternalSemaphoreProperties ) const + { + return ::vkGetPhysicalDeviceExternalSemaphoreProperties( physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties); + } + void vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VkExternalSemaphoreProperties* pExternalSemaphoreProperties ) const + { + return ::vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties); + } + void vkGetPhysicalDeviceFeatures( VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures* pFeatures ) const + { + return ::vkGetPhysicalDeviceFeatures( physicalDevice, pFeatures); + } + void vkGetPhysicalDeviceFeatures2( VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2* pFeatures ) const + { + return ::vkGetPhysicalDeviceFeatures2( physicalDevice, pFeatures); + } + void vkGetPhysicalDeviceFeatures2KHR( VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2* pFeatures ) const + { + return ::vkGetPhysicalDeviceFeatures2KHR( physicalDevice, pFeatures); + } + void vkGetPhysicalDeviceFormatProperties( VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties* pFormatProperties ) const + { + return ::vkGetPhysicalDeviceFormatProperties( physicalDevice, format, pFormatProperties); + } + void vkGetPhysicalDeviceFormatProperties2( VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties2* pFormatProperties ) const + { + return ::vkGetPhysicalDeviceFormatProperties2( physicalDevice, format, pFormatProperties); + } + void vkGetPhysicalDeviceFormatProperties2KHR( VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties2* pFormatProperties ) const + { + return ::vkGetPhysicalDeviceFormatProperties2KHR( physicalDevice, format, pFormatProperties); + } + void vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX( VkPhysicalDevice physicalDevice, VkDeviceGeneratedCommandsFeaturesNVX* pFeatures, VkDeviceGeneratedCommandsLimitsNVX* pLimits ) const + { + return ::vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX( physicalDevice, pFeatures, pLimits); + } + VkResult vkGetPhysicalDeviceImageFormatProperties( VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkImageFormatProperties* pImageFormatProperties ) const + { + return ::vkGetPhysicalDeviceImageFormatProperties( physicalDevice, format, type, tiling, usage, flags, pImageFormatProperties); + } + VkResult vkGetPhysicalDeviceImageFormatProperties2( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo, VkImageFormatProperties2* pImageFormatProperties ) const + { + return ::vkGetPhysicalDeviceImageFormatProperties2( physicalDevice, pImageFormatInfo, pImageFormatProperties); + } + VkResult vkGetPhysicalDeviceImageFormatProperties2KHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo, VkImageFormatProperties2* pImageFormatProperties ) const + { + return ::vkGetPhysicalDeviceImageFormatProperties2KHR( physicalDevice, pImageFormatInfo, pImageFormatProperties); + } + void vkGetPhysicalDeviceMemoryProperties( VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties* pMemoryProperties ) const + { + return ::vkGetPhysicalDeviceMemoryProperties( physicalDevice, pMemoryProperties); + } + void 
vkGetPhysicalDeviceMemoryProperties2( VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2* pMemoryProperties ) const + { + return ::vkGetPhysicalDeviceMemoryProperties2( physicalDevice, pMemoryProperties); + } + void vkGetPhysicalDeviceMemoryProperties2KHR( VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2* pMemoryProperties ) const + { + return ::vkGetPhysicalDeviceMemoryProperties2KHR( physicalDevice, pMemoryProperties); + } + void vkGetPhysicalDeviceMultisamplePropertiesEXT( VkPhysicalDevice physicalDevice, VkSampleCountFlagBits samples, VkMultisamplePropertiesEXT* pMultisampleProperties ) const + { + return ::vkGetPhysicalDeviceMultisamplePropertiesEXT( physicalDevice, samples, pMultisampleProperties); + } + VkResult vkGetPhysicalDevicePresentRectanglesKHR( VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pRectCount, VkRect2D* pRects ) const + { + return ::vkGetPhysicalDevicePresentRectanglesKHR( physicalDevice, surface, pRectCount, pRects); + } + void vkGetPhysicalDeviceProperties( VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties* pProperties ) const + { + return ::vkGetPhysicalDeviceProperties( physicalDevice, pProperties); + } + void vkGetPhysicalDeviceProperties2( VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2* pProperties ) const + { + return ::vkGetPhysicalDeviceProperties2( physicalDevice, pProperties); + } + void vkGetPhysicalDeviceProperties2KHR( VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2* pProperties ) const + { + return ::vkGetPhysicalDeviceProperties2KHR( physicalDevice, pProperties); + } + void vkGetPhysicalDeviceQueueFamilyProperties( VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties* pQueueFamilyProperties ) const + { + return ::vkGetPhysicalDeviceQueueFamilyProperties( physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties); + } + void vkGetPhysicalDeviceQueueFamilyProperties2( VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties2* pQueueFamilyProperties ) const + { + return ::vkGetPhysicalDeviceQueueFamilyProperties2( physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties); + } + void vkGetPhysicalDeviceQueueFamilyProperties2KHR( VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties2* pQueueFamilyProperties ) const + { + return ::vkGetPhysicalDeviceQueueFamilyProperties2KHR( physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties); + } + void vkGetPhysicalDeviceSparseImageFormatProperties( VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkSampleCountFlagBits samples, VkImageUsageFlags usage, VkImageTiling tiling, uint32_t* pPropertyCount, VkSparseImageFormatProperties* pProperties ) const + { + return ::vkGetPhysicalDeviceSparseImageFormatProperties( physicalDevice, format, type, samples, usage, tiling, pPropertyCount, pProperties); + } + void vkGetPhysicalDeviceSparseImageFormatProperties2( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VkSparseImageFormatProperties2* pProperties ) const + { + return ::vkGetPhysicalDeviceSparseImageFormatProperties2( physicalDevice, pFormatInfo, pPropertyCount, pProperties); + } + void vkGetPhysicalDeviceSparseImageFormatProperties2KHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VkSparseImageFormatProperties2* pProperties ) 
const + { + return ::vkGetPhysicalDeviceSparseImageFormatProperties2KHR( physicalDevice, pFormatInfo, pPropertyCount, pProperties); + } + VkResult vkGetPhysicalDeviceSurfaceCapabilities2EXT( VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilities2EXT* pSurfaceCapabilities ) const + { + return ::vkGetPhysicalDeviceSurfaceCapabilities2EXT( physicalDevice, surface, pSurfaceCapabilities); + } + VkResult vkGetPhysicalDeviceSurfaceCapabilities2KHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VkSurfaceCapabilities2KHR* pSurfaceCapabilities ) const + { + return ::vkGetPhysicalDeviceSurfaceCapabilities2KHR( physicalDevice, pSurfaceInfo, pSurfaceCapabilities); + } + VkResult vkGetPhysicalDeviceSurfaceCapabilitiesKHR( VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilitiesKHR* pSurfaceCapabilities ) const + { + return ::vkGetPhysicalDeviceSurfaceCapabilitiesKHR( physicalDevice, surface, pSurfaceCapabilities); + } + VkResult vkGetPhysicalDeviceSurfaceFormats2KHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pSurfaceFormatCount, VkSurfaceFormat2KHR* pSurfaceFormats ) const + { + return ::vkGetPhysicalDeviceSurfaceFormats2KHR( physicalDevice, pSurfaceInfo, pSurfaceFormatCount, pSurfaceFormats); + } + VkResult vkGetPhysicalDeviceSurfaceFormatsKHR( VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pSurfaceFormatCount, VkSurfaceFormatKHR* pSurfaceFormats ) const + { + return ::vkGetPhysicalDeviceSurfaceFormatsKHR( physicalDevice, surface, pSurfaceFormatCount, pSurfaceFormats); + } + VkResult vkGetPhysicalDeviceSurfacePresentModesKHR( VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pPresentModeCount, VkPresentModeKHR* pPresentModes ) const + { + return ::vkGetPhysicalDeviceSurfacePresentModesKHR( physicalDevice, surface, pPresentModeCount, pPresentModes); + } + VkResult vkGetPhysicalDeviceSurfaceSupportKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, VkSurfaceKHR surface, VkBool32* pSupported ) const + { + return ::vkGetPhysicalDeviceSurfaceSupportKHR( physicalDevice, queueFamilyIndex, surface, pSupported); + } +#ifdef VK_USE_PLATFORM_WAYLAND_KHR + VkBool32 vkGetPhysicalDeviceWaylandPresentationSupportKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, struct wl_display* display ) const + { + return ::vkGetPhysicalDeviceWaylandPresentationSupportKHR( physicalDevice, queueFamilyIndex, display); + } +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ +#ifdef VK_USE_PLATFORM_WIN32_KHR + VkBool32 vkGetPhysicalDeviceWin32PresentationSupportKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex ) const + { + return ::vkGetPhysicalDeviceWin32PresentationSupportKHR( physicalDevice, queueFamilyIndex); + } +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +#ifdef VK_USE_PLATFORM_XCB_KHR + VkBool32 vkGetPhysicalDeviceXcbPresentationSupportKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, xcb_connection_t* connection, xcb_visualid_t visual_id ) const + { + return ::vkGetPhysicalDeviceXcbPresentationSupportKHR( physicalDevice, queueFamilyIndex, connection, visual_id); + } +#endif /*VK_USE_PLATFORM_XCB_KHR*/ +#ifdef VK_USE_PLATFORM_XLIB_KHR + VkBool32 vkGetPhysicalDeviceXlibPresentationSupportKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, Display* dpy, VisualID visualID ) const + { + return ::vkGetPhysicalDeviceXlibPresentationSupportKHR( physicalDevice, queueFamilyIndex, dpy, visualID); + } +#endif 
/*VK_USE_PLATFORM_XLIB_KHR*/ + VkResult vkGetPipelineCacheData( VkDevice device, VkPipelineCache pipelineCache, size_t* pDataSize, void* pData ) const + { + return ::vkGetPipelineCacheData( device, pipelineCache, pDataSize, pData); + } + VkResult vkGetQueryPoolResults( VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void* pData, VkDeviceSize stride, VkQueryResultFlags flags ) const + { + return ::vkGetQueryPoolResults( device, queryPool, firstQuery, queryCount, dataSize, pData, stride, flags); + } + void vkGetQueueCheckpointDataNV( VkQueue queue, uint32_t* pCheckpointDataCount, VkCheckpointDataNV* pCheckpointData ) const + { + return ::vkGetQueueCheckpointDataNV( queue, pCheckpointDataCount, pCheckpointData); + } +#ifdef VK_USE_PLATFORM_XLIB_XRANDR_NV + VkResult vkGetRandROutputDisplayEXT( VkPhysicalDevice physicalDevice, Display* dpy, RROutput rrOutput, VkDisplayKHR* pDisplay ) const + { + return ::vkGetRandROutputDisplayEXT( physicalDevice, dpy, rrOutput, pDisplay); + } +#endif /*VK_USE_PLATFORM_XLIB_XRANDR_NV*/ + VkResult vkGetRayTracingShaderGroupHandlesNV( VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData ) const + { + return ::vkGetRayTracingShaderGroupHandlesNV( device, pipeline, firstGroup, groupCount, dataSize, pData); + } + VkResult vkGetRefreshCycleDurationGOOGLE( VkDevice device, VkSwapchainKHR swapchain, VkRefreshCycleDurationGOOGLE* pDisplayTimingProperties ) const + { + return ::vkGetRefreshCycleDurationGOOGLE( device, swapchain, pDisplayTimingProperties); + } + void vkGetRenderAreaGranularity( VkDevice device, VkRenderPass renderPass, VkExtent2D* pGranularity ) const + { + return ::vkGetRenderAreaGranularity( device, renderPass, pGranularity); + } + VkResult vkGetSemaphoreFdKHR( VkDevice device, const VkSemaphoreGetFdInfoKHR* pGetFdInfo, int* pFd ) const + { + return ::vkGetSemaphoreFdKHR( device, pGetFdInfo, pFd); + } +#ifdef VK_USE_PLATFORM_WIN32_KHR + VkResult vkGetSemaphoreWin32HandleKHR( VkDevice device, const VkSemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle ) const + { + return ::vkGetSemaphoreWin32HandleKHR( device, pGetWin32HandleInfo, pHandle); + } +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + VkResult vkGetShaderInfoAMD( VkDevice device, VkPipeline pipeline, VkShaderStageFlagBits shaderStage, VkShaderInfoTypeAMD infoType, size_t* pInfoSize, void* pInfo ) const + { + return ::vkGetShaderInfoAMD( device, pipeline, shaderStage, infoType, pInfoSize, pInfo); + } + VkResult vkGetSwapchainCounterEXT( VkDevice device, VkSwapchainKHR swapchain, VkSurfaceCounterFlagBitsEXT counter, uint64_t* pCounterValue ) const + { + return ::vkGetSwapchainCounterEXT( device, swapchain, counter, pCounterValue); + } + VkResult vkGetSwapchainImagesKHR( VkDevice device, VkSwapchainKHR swapchain, uint32_t* pSwapchainImageCount, VkImage* pSwapchainImages ) const + { + return ::vkGetSwapchainImagesKHR( device, swapchain, pSwapchainImageCount, pSwapchainImages); + } + VkResult vkGetSwapchainStatusKHR( VkDevice device, VkSwapchainKHR swapchain ) const + { + return ::vkGetSwapchainStatusKHR( device, swapchain); + } + VkResult vkGetValidationCacheDataEXT( VkDevice device, VkValidationCacheEXT validationCache, size_t* pDataSize, void* pData ) const + { + return ::vkGetValidationCacheDataEXT( device, validationCache, pDataSize, pData); + } + VkResult vkImportFenceFdKHR( VkDevice device, const VkImportFenceFdInfoKHR* pImportFenceFdInfo ) const + { + return ::vkImportFenceFdKHR( 
device, pImportFenceFdInfo); + } +#ifdef VK_USE_PLATFORM_WIN32_KHR + VkResult vkImportFenceWin32HandleKHR( VkDevice device, const VkImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo ) const + { + return ::vkImportFenceWin32HandleKHR( device, pImportFenceWin32HandleInfo); + } +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + VkResult vkImportSemaphoreFdKHR( VkDevice device, const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo ) const + { + return ::vkImportSemaphoreFdKHR( device, pImportSemaphoreFdInfo); + } +#ifdef VK_USE_PLATFORM_WIN32_KHR + VkResult vkImportSemaphoreWin32HandleKHR( VkDevice device, const VkImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo ) const + { + return ::vkImportSemaphoreWin32HandleKHR( device, pImportSemaphoreWin32HandleInfo); + } +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + VkResult vkInvalidateMappedMemoryRanges( VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange* pMemoryRanges ) const + { + return ::vkInvalidateMappedMemoryRanges( device, memoryRangeCount, pMemoryRanges); + } + VkResult vkMapMemory( VkDevice device, VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags, void** ppData ) const + { + return ::vkMapMemory( device, memory, offset, size, flags, ppData); + } + VkResult vkMergePipelineCaches( VkDevice device, VkPipelineCache dstCache, uint32_t srcCacheCount, const VkPipelineCache* pSrcCaches ) const + { + return ::vkMergePipelineCaches( device, dstCache, srcCacheCount, pSrcCaches); + } + VkResult vkMergeValidationCachesEXT( VkDevice device, VkValidationCacheEXT dstCache, uint32_t srcCacheCount, const VkValidationCacheEXT* pSrcCaches ) const + { + return ::vkMergeValidationCachesEXT( device, dstCache, srcCacheCount, pSrcCaches); + } + void vkQueueBeginDebugUtilsLabelEXT( VkQueue queue, const VkDebugUtilsLabelEXT* pLabelInfo ) const + { + return ::vkQueueBeginDebugUtilsLabelEXT( queue, pLabelInfo); + } + VkResult vkQueueBindSparse( VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo* pBindInfo, VkFence fence ) const + { + return ::vkQueueBindSparse( queue, bindInfoCount, pBindInfo, fence); + } + void vkQueueEndDebugUtilsLabelEXT( VkQueue queue ) const + { + return ::vkQueueEndDebugUtilsLabelEXT( queue); + } + void vkQueueInsertDebugUtilsLabelEXT( VkQueue queue, const VkDebugUtilsLabelEXT* pLabelInfo ) const + { + return ::vkQueueInsertDebugUtilsLabelEXT( queue, pLabelInfo); + } + VkResult vkQueuePresentKHR( VkQueue queue, const VkPresentInfoKHR* pPresentInfo ) const + { + return ::vkQueuePresentKHR( queue, pPresentInfo); + } + VkResult vkQueueSubmit( VkQueue queue, uint32_t submitCount, const VkSubmitInfo* pSubmits, VkFence fence ) const + { + return ::vkQueueSubmit( queue, submitCount, pSubmits, fence); + } + VkResult vkQueueWaitIdle( VkQueue queue ) const + { + return ::vkQueueWaitIdle( queue); + } + VkResult vkRegisterDeviceEventEXT( VkDevice device, const VkDeviceEventInfoEXT* pDeviceEventInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence ) const + { + return ::vkRegisterDeviceEventEXT( device, pDeviceEventInfo, pAllocator, pFence); + } + VkResult vkRegisterDisplayEventEXT( VkDevice device, VkDisplayKHR display, const VkDisplayEventInfoEXT* pDisplayEventInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence ) const + { + return ::vkRegisterDisplayEventEXT( device, display, pDisplayEventInfo, pAllocator, pFence); + } + VkResult vkRegisterObjectsNVX( VkDevice device, VkObjectTableNVX objectTable, uint32_t objectCount, const VkObjectTableEntryNVX* const* 
ppObjectTableEntries, const uint32_t* pObjectIndices ) const + { + return ::vkRegisterObjectsNVX( device, objectTable, objectCount, ppObjectTableEntries, pObjectIndices); + } + VkResult vkReleaseDisplayEXT( VkPhysicalDevice physicalDevice, VkDisplayKHR display ) const + { + return ::vkReleaseDisplayEXT( physicalDevice, display); + } + VkResult vkResetCommandBuffer( VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags ) const + { + return ::vkResetCommandBuffer( commandBuffer, flags); + } + VkResult vkResetCommandPool( VkDevice device, VkCommandPool commandPool, VkCommandPoolResetFlags flags ) const + { + return ::vkResetCommandPool( device, commandPool, flags); + } + VkResult vkResetDescriptorPool( VkDevice device, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags ) const + { + return ::vkResetDescriptorPool( device, descriptorPool, flags); + } + VkResult vkResetEvent( VkDevice device, VkEvent event ) const + { + return ::vkResetEvent( device, event); + } + VkResult vkResetFences( VkDevice device, uint32_t fenceCount, const VkFence* pFences ) const + { + return ::vkResetFences( device, fenceCount, pFences); + } + VkResult vkSetDebugUtilsObjectNameEXT( VkDevice device, const VkDebugUtilsObjectNameInfoEXT* pNameInfo ) const + { + return ::vkSetDebugUtilsObjectNameEXT( device, pNameInfo); + } + VkResult vkSetDebugUtilsObjectTagEXT( VkDevice device, const VkDebugUtilsObjectTagInfoEXT* pTagInfo ) const + { + return ::vkSetDebugUtilsObjectTagEXT( device, pTagInfo); + } + VkResult vkSetEvent( VkDevice device, VkEvent event ) const + { + return ::vkSetEvent( device, event); + } + void vkSetHdrMetadataEXT( VkDevice device, uint32_t swapchainCount, const VkSwapchainKHR* pSwapchains, const VkHdrMetadataEXT* pMetadata ) const + { + return ::vkSetHdrMetadataEXT( device, swapchainCount, pSwapchains, pMetadata); + } + void vkSubmitDebugUtilsMessageEXT( VkInstance instance, VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VkDebugUtilsMessageTypeFlagsEXT messageTypes, const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData ) const + { + return ::vkSubmitDebugUtilsMessageEXT( instance, messageSeverity, messageTypes, pCallbackData); + } + void vkTrimCommandPool( VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags ) const + { + return ::vkTrimCommandPool( device, commandPool, flags); + } + void vkTrimCommandPoolKHR( VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags ) const + { + return ::vkTrimCommandPoolKHR( device, commandPool, flags); + } + void vkUnmapMemory( VkDevice device, VkDeviceMemory memory ) const + { + return ::vkUnmapMemory( device, memory); + } + VkResult vkUnregisterObjectsNVX( VkDevice device, VkObjectTableNVX objectTable, uint32_t objectCount, const VkObjectEntryTypeNVX* pObjectEntryTypes, const uint32_t* pObjectIndices ) const + { + return ::vkUnregisterObjectsNVX( device, objectTable, objectCount, pObjectEntryTypes, pObjectIndices); + } + void vkUpdateDescriptorSetWithTemplate( VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData ) const + { + return ::vkUpdateDescriptorSetWithTemplate( device, descriptorSet, descriptorUpdateTemplate, pData); + } + void vkUpdateDescriptorSetWithTemplateKHR( VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData ) const + { + return ::vkUpdateDescriptorSetWithTemplateKHR( device, descriptorSet, descriptorUpdateTemplate, pData); + } + void 
vkUpdateDescriptorSets( VkDevice device, uint32_t descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites, uint32_t descriptorCopyCount, const VkCopyDescriptorSet* pDescriptorCopies ) const
+    {
+      return ::vkUpdateDescriptorSets( device, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount, pDescriptorCopies);
+    }
+    VkResult vkWaitForFences( VkDevice device, uint32_t fenceCount, const VkFence* pFences, VkBool32 waitAll, uint64_t timeout ) const
+    {
+      return ::vkWaitForFences( device, fenceCount, pFences, waitAll, timeout);
+    }
+  };
+
+  struct AllocationCallbacks;
+
+  template <typename OwnerType, typename Dispatch>
+  class ObjectDestroy
+  {
+  public:
+    ObjectDestroy( OwnerType owner = OwnerType(), Optional<const AllocationCallbacks> allocationCallbacks = nullptr, Dispatch const &dispatch = Dispatch() )
+      : m_owner( owner )
+      , m_allocationCallbacks( allocationCallbacks )
+      , m_dispatch( &dispatch )
+    {}
+
+    OwnerType getOwner() const { return m_owner; }
+    Optional<const AllocationCallbacks> getAllocator() const { return m_allocationCallbacks; }
+
+  protected:
+    template <typename T>
+    void destroy(T t)
+    {
+      m_owner.destroy( t, m_allocationCallbacks, *m_dispatch );
+    }
+
+  private:
+    OwnerType m_owner;
+    Optional<const AllocationCallbacks> m_allocationCallbacks;
+    Dispatch const* m_dispatch;
+  };
+
+  class NoParent;
+
+  template <typename Dispatch>
+  class ObjectDestroy<NoParent,Dispatch>
+  {
+  public:
+    ObjectDestroy( Optional<const AllocationCallbacks> allocationCallbacks = nullptr, Dispatch const &dispatch = Dispatch() )
+      : m_allocationCallbacks( allocationCallbacks )
+      , m_dispatch( &dispatch )
+    {}
+
+    Optional<const AllocationCallbacks> getAllocator() const { return m_allocationCallbacks; }
+
+  protected:
+    template <typename T>
+    void destroy(T t)
+    {
+      t.destroy( m_allocationCallbacks, *m_dispatch );
+    }
+
+  private:
+    Optional<const AllocationCallbacks> m_allocationCallbacks;
+    Dispatch const* m_dispatch;
+  };
+
+  template <typename OwnerType, typename Dispatch>
+  class ObjectFree
+  {
+  public:
+    ObjectFree( OwnerType owner = OwnerType(), Optional<const AllocationCallbacks> allocationCallbacks = nullptr, Dispatch const &dispatch = Dispatch() )
+      : m_owner( owner )
+      , m_allocationCallbacks( allocationCallbacks )
+      , m_dispatch( &dispatch )
+    {}
+
+    OwnerType getOwner() const { return m_owner; }
+    Optional<const AllocationCallbacks> getAllocator() const { return m_allocationCallbacks; }
+
+  protected:
+    template <typename T>
+    void destroy(T t)
+    {
+      m_owner.free( t, m_allocationCallbacks, *m_dispatch );
+    }
+
+  private:
+    OwnerType m_owner;
+    Optional<const AllocationCallbacks> m_allocationCallbacks;
+    Dispatch const* m_dispatch;
+  };
+
+  template <typename OwnerType, typename PoolType, typename Dispatch>
+  class PoolFree
+  {
+  public:
+    PoolFree( OwnerType owner = OwnerType(), PoolType pool = PoolType(), Dispatch const &dispatch = Dispatch() )
+      : m_owner( owner )
+      , m_pool( pool )
+      , m_dispatch( &dispatch )
+    {}
+
+    OwnerType getOwner() const { return m_owner; }
+    PoolType getPool() const { return m_pool; }
+
+  protected:
+    template <typename T>
+    void destroy(T t)
+    {
+      m_owner.free( m_pool, t, *m_dispatch );
+    }
+
+  private:
+    OwnerType m_owner;
+    PoolType m_pool;
+    Dispatch const* m_dispatch;
+  };
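+
+  // Illustrative usage of the deleter classes above, assuming the usual vulkan.hpp
+  // unique-handle helpers (UniqueBuffer, UniqueDeviceMemory, UniqueDescriptorSet, ...):
+  //
+  //   vk::UniqueBuffer buffer = device.createBufferUnique( bufferCreateInfo );      // destroyed via ObjectDestroy<Device, Dispatch>
+  //   vk::UniqueDeviceMemory memory = device.allocateMemoryUnique( allocateInfo );  // freed via ObjectFree<Device, Dispatch>
+  //   auto sets = device.allocateDescriptorSetsUnique( setAllocateInfo );           // returned to the pool via PoolFree<Device, DescriptorPool, Dispatch>
+  //
+  // bufferCreateInfo, allocateInfo and setAllocateInfo are placeholder variables.
+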
+  using SampleMask = uint32_t;
+
+  using Bool32 = uint32_t;
+
+  using DeviceSize = uint64_t;
+
+  enum class FramebufferCreateFlagBits {};
+  using FramebufferCreateFlags = Flags<FramebufferCreateFlagBits, VkFramebufferCreateFlags>;
+
+  enum class QueryPoolCreateFlagBits {};
+  using QueryPoolCreateFlags = Flags<QueryPoolCreateFlagBits, VkQueryPoolCreateFlags>;
+
+  enum class RenderPassCreateFlagBits {};
+  using RenderPassCreateFlags = Flags<RenderPassCreateFlagBits, VkRenderPassCreateFlags>;
+
+  enum class SamplerCreateFlagBits {};
+  using SamplerCreateFlags = Flags<SamplerCreateFlagBits, VkSamplerCreateFlags>;
+
+  enum class PipelineLayoutCreateFlagBits {};
+  using PipelineLayoutCreateFlags = Flags<PipelineLayoutCreateFlagBits, VkPipelineLayoutCreateFlags>;
+
+  enum class PipelineCacheCreateFlagBits {};
+  using PipelineCacheCreateFlags = Flags<PipelineCacheCreateFlagBits, VkPipelineCacheCreateFlags>;
+
+  enum class PipelineDepthStencilStateCreateFlagBits {};
+  using PipelineDepthStencilStateCreateFlags = Flags<PipelineDepthStencilStateCreateFlagBits, VkPipelineDepthStencilStateCreateFlags>;
+
+  enum class PipelineDynamicStateCreateFlagBits {};
+  using PipelineDynamicStateCreateFlags = Flags<PipelineDynamicStateCreateFlagBits, VkPipelineDynamicStateCreateFlags>;
+
+  enum class PipelineColorBlendStateCreateFlagBits {};
+  using PipelineColorBlendStateCreateFlags = Flags<PipelineColorBlendStateCreateFlagBits, VkPipelineColorBlendStateCreateFlags>;
+
+  enum class PipelineMultisampleStateCreateFlagBits {};
+  using PipelineMultisampleStateCreateFlags = Flags<PipelineMultisampleStateCreateFlagBits, VkPipelineMultisampleStateCreateFlags>;
+
+  enum class PipelineRasterizationStateCreateFlagBits {};
+  using PipelineRasterizationStateCreateFlags = Flags<PipelineRasterizationStateCreateFlagBits, VkPipelineRasterizationStateCreateFlags>;
+
+  enum class PipelineViewportStateCreateFlagBits {};
+  using PipelineViewportStateCreateFlags = Flags<PipelineViewportStateCreateFlagBits, VkPipelineViewportStateCreateFlags>;
+
+  enum class PipelineTessellationStateCreateFlagBits {};
+  using PipelineTessellationStateCreateFlags = Flags<PipelineTessellationStateCreateFlagBits, VkPipelineTessellationStateCreateFlags>;
+
+  enum class PipelineInputAssemblyStateCreateFlagBits {};
+  using PipelineInputAssemblyStateCreateFlags = Flags<PipelineInputAssemblyStateCreateFlagBits, VkPipelineInputAssemblyStateCreateFlags>;
+
+  enum class PipelineVertexInputStateCreateFlagBits {};
+  using PipelineVertexInputStateCreateFlags = Flags<PipelineVertexInputStateCreateFlagBits, VkPipelineVertexInputStateCreateFlags>;
+
+  enum class PipelineShaderStageCreateFlagBits {};
+  using PipelineShaderStageCreateFlags = Flags<PipelineShaderStageCreateFlagBits, VkPipelineShaderStageCreateFlags>;
+
+  enum class BufferViewCreateFlagBits {};
+  using BufferViewCreateFlags = Flags<BufferViewCreateFlagBits, VkBufferViewCreateFlags>;
+
+  enum class InstanceCreateFlagBits {};
+  using InstanceCreateFlags = Flags<InstanceCreateFlagBits, VkInstanceCreateFlags>;
+
+  enum class DeviceCreateFlagBits {};
+  using DeviceCreateFlags = Flags<DeviceCreateFlagBits, VkDeviceCreateFlags>;
+
+  enum class ImageViewCreateFlagBits {};
+  using ImageViewCreateFlags = Flags<ImageViewCreateFlagBits, VkImageViewCreateFlags>;
+
+  enum class SemaphoreCreateFlagBits {};
+  using SemaphoreCreateFlags = Flags<SemaphoreCreateFlagBits, VkSemaphoreCreateFlags>;
+
+  enum class ShaderModuleCreateFlagBits {};
+  using ShaderModuleCreateFlags = Flags<ShaderModuleCreateFlagBits, VkShaderModuleCreateFlags>;
+
+  enum class EventCreateFlagBits {};
+  using EventCreateFlags = Flags<EventCreateFlagBits, VkEventCreateFlags>;
+
+  enum class MemoryMapFlagBits {};
+  using MemoryMapFlags = Flags<MemoryMapFlagBits, VkMemoryMapFlags>;
+
+  enum class DescriptorPoolResetFlagBits {};
+  using DescriptorPoolResetFlags = Flags<DescriptorPoolResetFlagBits, VkDescriptorPoolResetFlags>;
+
+  enum class DescriptorUpdateTemplateCreateFlagBits {};
+  using DescriptorUpdateTemplateCreateFlags = Flags<DescriptorUpdateTemplateCreateFlagBits, VkDescriptorUpdateTemplateCreateFlags>;
+  using DescriptorUpdateTemplateCreateFlagsKHR = DescriptorUpdateTemplateCreateFlags;
+
+  enum class DisplayModeCreateFlagBitsKHR {};
+  using DisplayModeCreateFlagsKHR = Flags<DisplayModeCreateFlagBitsKHR, VkDisplayModeCreateFlagsKHR>;
+
+  enum class DisplaySurfaceCreateFlagBitsKHR {};
+  using DisplaySurfaceCreateFlagsKHR = Flags<DisplaySurfaceCreateFlagBitsKHR, VkDisplaySurfaceCreateFlagsKHR>;
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+  enum class AndroidSurfaceCreateFlagBitsKHR {};
+  using AndroidSurfaceCreateFlagsKHR = Flags<AndroidSurfaceCreateFlagBitsKHR, VkAndroidSurfaceCreateFlagsKHR>;
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+#ifdef VK_USE_PLATFORM_VI_NN
+  enum class ViSurfaceCreateFlagBitsNN {};
+  using ViSurfaceCreateFlagsNN = Flags<ViSurfaceCreateFlagBitsNN, VkViSurfaceCreateFlagsNN>;
+#endif /*VK_USE_PLATFORM_VI_NN*/
+
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+  enum class WaylandSurfaceCreateFlagBitsKHR {};
+  using WaylandSurfaceCreateFlagsKHR = Flags<WaylandSurfaceCreateFlagBitsKHR, VkWaylandSurfaceCreateFlagsKHR>;
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  enum class Win32SurfaceCreateFlagBitsKHR {};
+  using Win32SurfaceCreateFlagsKHR = Flags<Win32SurfaceCreateFlagBitsKHR, VkWin32SurfaceCreateFlagsKHR>;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+  enum class XlibSurfaceCreateFlagBitsKHR {};
+  using XlibSurfaceCreateFlagsKHR = Flags<XlibSurfaceCreateFlagBitsKHR, VkXlibSurfaceCreateFlagsKHR>;
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+
+#ifdef VK_USE_PLATFORM_XCB_KHR
+  enum class XcbSurfaceCreateFlagBitsKHR {};
+  using XcbSurfaceCreateFlagsKHR = Flags<XcbSurfaceCreateFlagBitsKHR, VkXcbSurfaceCreateFlagsKHR>;
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+
+#ifdef VK_USE_PLATFORM_IOS_MVK
+  enum class IOSSurfaceCreateFlagBitsMVK {};
+  using IOSSurfaceCreateFlagsMVK = Flags<IOSSurfaceCreateFlagBitsMVK, VkIOSSurfaceCreateFlagsMVK>;
+#endif /*VK_USE_PLATFORM_IOS_MVK*/
+
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+  enum class MacOSSurfaceCreateFlagBitsMVK {};
+  using MacOSSurfaceCreateFlagsMVK = Flags<MacOSSurfaceCreateFlagBitsMVK, VkMacOSSurfaceCreateFlagsMVK>;
+#endif /*VK_USE_PLATFORM_MACOS_MVK*/
+
+#ifdef VK_USE_PLATFORM_FUCHSIA_FUCHSIA
+  enum class ImagePipeSurfaceCreateFlagBitsFUCHSIA {};
+  using ImagePipeSurfaceCreateFlagsFUCHSIA = Flags<ImagePipeSurfaceCreateFlagBitsFUCHSIA, VkImagePipeSurfaceCreateFlagsFUCHSIA>;
+#endif /*VK_USE_PLATFORM_FUCHSIA_FUCHSIA*/
+
+  enum class CommandPoolTrimFlagBits {};
+  using CommandPoolTrimFlags = Flags<CommandPoolTrimFlagBits, VkCommandPoolTrimFlags>;
+  using CommandPoolTrimFlagsKHR = CommandPoolTrimFlags;
+
+  enum class PipelineViewportSwizzleStateCreateFlagBitsNV {};
+  using PipelineViewportSwizzleStateCreateFlagsNV = Flags<PipelineViewportSwizzleStateCreateFlagBitsNV, VkPipelineViewportSwizzleStateCreateFlagsNV>;
+
+  enum class PipelineDiscardRectangleStateCreateFlagBitsEXT {};
+  using PipelineDiscardRectangleStateCreateFlagsEXT = Flags<PipelineDiscardRectangleStateCreateFlagBitsEXT, VkPipelineDiscardRectangleStateCreateFlagsEXT>;
+
+  enum class PipelineCoverageToColorStateCreateFlagBitsNV {};
+  using PipelineCoverageToColorStateCreateFlagsNV = Flags<PipelineCoverageToColorStateCreateFlagBitsNV, VkPipelineCoverageToColorStateCreateFlagsNV>;
+
+  enum class PipelineCoverageModulationStateCreateFlagBitsNV {};
+  using PipelineCoverageModulationStateCreateFlagsNV = Flags<PipelineCoverageModulationStateCreateFlagBitsNV, VkPipelineCoverageModulationStateCreateFlagsNV>;
+
+  enum class ValidationCacheCreateFlagBitsEXT {};
+  using ValidationCacheCreateFlagsEXT = Flags<ValidationCacheCreateFlagBitsEXT, VkValidationCacheCreateFlagsEXT>;
+
+  enum class DebugUtilsMessengerCreateFlagBitsEXT {};
+  using DebugUtilsMessengerCreateFlagsEXT = Flags<DebugUtilsMessengerCreateFlagBitsEXT, VkDebugUtilsMessengerCreateFlagsEXT>;
+
+  enum class DebugUtilsMessengerCallbackDataFlagBitsEXT {};
+  using DebugUtilsMessengerCallbackDataFlagsEXT = Flags<DebugUtilsMessengerCallbackDataFlagBitsEXT, VkDebugUtilsMessengerCallbackDataFlagsEXT>;
+
+  enum class PipelineRasterizationConservativeStateCreateFlagBitsEXT {};
+  using PipelineRasterizationConservativeStateCreateFlagsEXT = Flags<PipelineRasterizationConservativeStateCreateFlagBitsEXT, VkPipelineRasterizationConservativeStateCreateFlagsEXT>;
+
+  enum class PipelineRasterizationStateStreamCreateFlagBitsEXT {};
+  using PipelineRasterizationStateStreamCreateFlagsEXT = Flags<PipelineRasterizationStateStreamCreateFlagBitsEXT, VkPipelineRasterizationStateStreamCreateFlagsEXT>;
+
+  class DeviceMemory
+  {
+  public:
+    VULKAN_HPP_CONSTEXPR DeviceMemory()
+      : m_deviceMemory(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR DeviceMemory( std::nullptr_t )
+      : m_deviceMemory(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT DeviceMemory( VkDeviceMemory deviceMemory )
+      : m_deviceMemory( deviceMemory )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    DeviceMemory & operator=(VkDeviceMemory deviceMemory)
+    {
+      m_deviceMemory = deviceMemory;
+      return *this;
+    }
+#endif
+
+    DeviceMemory & operator=( std::nullptr_t )
+    {
+      m_deviceMemory = VK_NULL_HANDLE;
+      return *this;
+    }
+
+    bool operator==( DeviceMemory const & rhs ) const
+    {
+      return m_deviceMemory == rhs.m_deviceMemory;
+    }
+
+    bool operator!=(DeviceMemory const & rhs ) const
+    {
+      return m_deviceMemory != rhs.m_deviceMemory;
+    }
+
+    bool operator<(DeviceMemory const & rhs ) const
+    {
+      return m_deviceMemory < rhs.m_deviceMemory;
+    }
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDeviceMemory() const
+    {
+      return m_deviceMemory;
+    }
+
+    explicit operator bool() const
+    {
+      return m_deviceMemory != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const
+    {
+      return m_deviceMemory == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkDeviceMemory m_deviceMemory;
+  };
+
+  static_assert( sizeof( DeviceMemory ) == sizeof(
VkDeviceMemory ), "handle and wrapper have different size!" ); + + class CommandPool + { + public: + VULKAN_HPP_CONSTEXPR CommandPool() + : m_commandPool(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR CommandPool( std::nullptr_t ) + : m_commandPool(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT CommandPool( VkCommandPool commandPool ) + : m_commandPool( commandPool ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + CommandPool & operator=(VkCommandPool commandPool) + { + m_commandPool = commandPool; + return *this; + } +#endif + + CommandPool & operator=( std::nullptr_t ) + { + m_commandPool = VK_NULL_HANDLE; + return *this; + } + + bool operator==( CommandPool const & rhs ) const + { + return m_commandPool == rhs.m_commandPool; + } + + bool operator!=(CommandPool const & rhs ) const + { + return m_commandPool != rhs.m_commandPool; + } + + bool operator<(CommandPool const & rhs ) const + { + return m_commandPool < rhs.m_commandPool; + } + + + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkCommandPool() const + { + return m_commandPool; + } + + explicit operator bool() const + { + return m_commandPool != VK_NULL_HANDLE; + } + + bool operator!() const + { + return m_commandPool == VK_NULL_HANDLE; + } + + private: + VkCommandPool m_commandPool; + }; + + static_assert( sizeof( CommandPool ) == sizeof( VkCommandPool ), "handle and wrapper have different size!" ); + + class Buffer + { + public: + VULKAN_HPP_CONSTEXPR Buffer() + : m_buffer(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR Buffer( std::nullptr_t ) + : m_buffer(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT Buffer( VkBuffer buffer ) + : m_buffer( buffer ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + Buffer & operator=(VkBuffer buffer) + { + m_buffer = buffer; + return *this; + } +#endif + + Buffer & operator=( std::nullptr_t ) + { + m_buffer = VK_NULL_HANDLE; + return *this; + } + + bool operator==( Buffer const & rhs ) const + { + return m_buffer == rhs.m_buffer; + } + + bool operator!=(Buffer const & rhs ) const + { + return m_buffer != rhs.m_buffer; + } + + bool operator<(Buffer const & rhs ) const + { + return m_buffer < rhs.m_buffer; + } + + + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkBuffer() const + { + return m_buffer; + } + + explicit operator bool() const + { + return m_buffer != VK_NULL_HANDLE; + } + + bool operator!() const + { + return m_buffer == VK_NULL_HANDLE; + } + + private: + VkBuffer m_buffer; + }; + + static_assert( sizeof( Buffer ) == sizeof( VkBuffer ), "handle and wrapper have different size!" 
); + + class BufferView + { + public: + VULKAN_HPP_CONSTEXPR BufferView() + : m_bufferView(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR BufferView( std::nullptr_t ) + : m_bufferView(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT BufferView( VkBufferView bufferView ) + : m_bufferView( bufferView ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + BufferView & operator=(VkBufferView bufferView) + { + m_bufferView = bufferView; + return *this; + } +#endif + + BufferView & operator=( std::nullptr_t ) + { + m_bufferView = VK_NULL_HANDLE; + return *this; + } + + bool operator==( BufferView const & rhs ) const + { + return m_bufferView == rhs.m_bufferView; + } + + bool operator!=(BufferView const & rhs ) const + { + return m_bufferView != rhs.m_bufferView; + } + + bool operator<(BufferView const & rhs ) const + { + return m_bufferView < rhs.m_bufferView; + } + + + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkBufferView() const + { + return m_bufferView; + } + + explicit operator bool() const + { + return m_bufferView != VK_NULL_HANDLE; + } + + bool operator!() const + { + return m_bufferView == VK_NULL_HANDLE; + } + + private: + VkBufferView m_bufferView; + }; + + static_assert( sizeof( BufferView ) == sizeof( VkBufferView ), "handle and wrapper have different size!" ); + + class Image + { + public: + VULKAN_HPP_CONSTEXPR Image() + : m_image(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR Image( std::nullptr_t ) + : m_image(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT Image( VkImage image ) + : m_image( image ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + Image & operator=(VkImage image) + { + m_image = image; + return *this; + } +#endif + + Image & operator=( std::nullptr_t ) + { + m_image = VK_NULL_HANDLE; + return *this; + } + + bool operator==( Image const & rhs ) const + { + return m_image == rhs.m_image; + } + + bool operator!=(Image const & rhs ) const + { + return m_image != rhs.m_image; + } + + bool operator<(Image const & rhs ) const + { + return m_image < rhs.m_image; + } + + + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkImage() const + { + return m_image; + } + + explicit operator bool() const + { + return m_image != VK_NULL_HANDLE; + } + + bool operator!() const + { + return m_image == VK_NULL_HANDLE; + } + + private: + VkImage m_image; + }; + + static_assert( sizeof( Image ) == sizeof( VkImage ), "handle and wrapper have different size!" 
); + + class ImageView + { + public: + VULKAN_HPP_CONSTEXPR ImageView() + : m_imageView(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR ImageView( std::nullptr_t ) + : m_imageView(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT ImageView( VkImageView imageView ) + : m_imageView( imageView ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + ImageView & operator=(VkImageView imageView) + { + m_imageView = imageView; + return *this; + } +#endif + + ImageView & operator=( std::nullptr_t ) + { + m_imageView = VK_NULL_HANDLE; + return *this; + } + + bool operator==( ImageView const & rhs ) const + { + return m_imageView == rhs.m_imageView; + } + + bool operator!=(ImageView const & rhs ) const + { + return m_imageView != rhs.m_imageView; + } + + bool operator<(ImageView const & rhs ) const + { + return m_imageView < rhs.m_imageView; + } + + + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkImageView() const + { + return m_imageView; + } + + explicit operator bool() const + { + return m_imageView != VK_NULL_HANDLE; + } + + bool operator!() const + { + return m_imageView == VK_NULL_HANDLE; + } + + private: + VkImageView m_imageView; + }; + + static_assert( sizeof( ImageView ) == sizeof( VkImageView ), "handle and wrapper have different size!" ); + + class ShaderModule + { + public: + VULKAN_HPP_CONSTEXPR ShaderModule() + : m_shaderModule(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR ShaderModule( std::nullptr_t ) + : m_shaderModule(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT ShaderModule( VkShaderModule shaderModule ) + : m_shaderModule( shaderModule ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + ShaderModule & operator=(VkShaderModule shaderModule) + { + m_shaderModule = shaderModule; + return *this; + } +#endif + + ShaderModule & operator=( std::nullptr_t ) + { + m_shaderModule = VK_NULL_HANDLE; + return *this; + } + + bool operator==( ShaderModule const & rhs ) const + { + return m_shaderModule == rhs.m_shaderModule; + } + + bool operator!=(ShaderModule const & rhs ) const + { + return m_shaderModule != rhs.m_shaderModule; + } + + bool operator<(ShaderModule const & rhs ) const + { + return m_shaderModule < rhs.m_shaderModule; + } + + + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkShaderModule() const + { + return m_shaderModule; + } + + explicit operator bool() const + { + return m_shaderModule != VK_NULL_HANDLE; + } + + bool operator!() const + { + return m_shaderModule == VK_NULL_HANDLE; + } + + private: + VkShaderModule m_shaderModule; + }; + + static_assert( sizeof( ShaderModule ) == sizeof( VkShaderModule ), "handle and wrapper have different size!" 
); + + class Pipeline + { + public: + VULKAN_HPP_CONSTEXPR Pipeline() + : m_pipeline(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR Pipeline( std::nullptr_t ) + : m_pipeline(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT Pipeline( VkPipeline pipeline ) + : m_pipeline( pipeline ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + Pipeline & operator=(VkPipeline pipeline) + { + m_pipeline = pipeline; + return *this; + } +#endif + + Pipeline & operator=( std::nullptr_t ) + { + m_pipeline = VK_NULL_HANDLE; + return *this; + } + + bool operator==( Pipeline const & rhs ) const + { + return m_pipeline == rhs.m_pipeline; + } + + bool operator!=(Pipeline const & rhs ) const + { + return m_pipeline != rhs.m_pipeline; + } + + bool operator<(Pipeline const & rhs ) const + { + return m_pipeline < rhs.m_pipeline; + } + + + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPipeline() const + { + return m_pipeline; + } + + explicit operator bool() const + { + return m_pipeline != VK_NULL_HANDLE; + } + + bool operator!() const + { + return m_pipeline == VK_NULL_HANDLE; + } + + private: + VkPipeline m_pipeline; + }; + + static_assert( sizeof( Pipeline ) == sizeof( VkPipeline ), "handle and wrapper have different size!" ); + + class PipelineLayout + { + public: + VULKAN_HPP_CONSTEXPR PipelineLayout() + : m_pipelineLayout(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR PipelineLayout( std::nullptr_t ) + : m_pipelineLayout(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT PipelineLayout( VkPipelineLayout pipelineLayout ) + : m_pipelineLayout( pipelineLayout ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + PipelineLayout & operator=(VkPipelineLayout pipelineLayout) + { + m_pipelineLayout = pipelineLayout; + return *this; + } +#endif + + PipelineLayout & operator=( std::nullptr_t ) + { + m_pipelineLayout = VK_NULL_HANDLE; + return *this; + } + + bool operator==( PipelineLayout const & rhs ) const + { + return m_pipelineLayout == rhs.m_pipelineLayout; + } + + bool operator!=(PipelineLayout const & rhs ) const + { + return m_pipelineLayout != rhs.m_pipelineLayout; + } + + bool operator<(PipelineLayout const & rhs ) const + { + return m_pipelineLayout < rhs.m_pipelineLayout; + } + + + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPipelineLayout() const + { + return m_pipelineLayout; + } + + explicit operator bool() const + { + return m_pipelineLayout != VK_NULL_HANDLE; + } + + bool operator!() const + { + return m_pipelineLayout == VK_NULL_HANDLE; + } + + private: + VkPipelineLayout m_pipelineLayout; + }; + + static_assert( sizeof( PipelineLayout ) == sizeof( VkPipelineLayout ), "handle and wrapper have different size!" 
); + + class Sampler + { + public: + VULKAN_HPP_CONSTEXPR Sampler() + : m_sampler(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR Sampler( std::nullptr_t ) + : m_sampler(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT Sampler( VkSampler sampler ) + : m_sampler( sampler ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + Sampler & operator=(VkSampler sampler) + { + m_sampler = sampler; + return *this; + } +#endif + + Sampler & operator=( std::nullptr_t ) + { + m_sampler = VK_NULL_HANDLE; + return *this; + } + + bool operator==( Sampler const & rhs ) const + { + return m_sampler == rhs.m_sampler; + } + + bool operator!=(Sampler const & rhs ) const + { + return m_sampler != rhs.m_sampler; + } + + bool operator<(Sampler const & rhs ) const + { + return m_sampler < rhs.m_sampler; + } + + + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSampler() const + { + return m_sampler; + } + + explicit operator bool() const + { + return m_sampler != VK_NULL_HANDLE; + } + + bool operator!() const + { + return m_sampler == VK_NULL_HANDLE; + } + + private: + VkSampler m_sampler; + }; + + static_assert( sizeof( Sampler ) == sizeof( VkSampler ), "handle and wrapper have different size!" ); + + class DescriptorSet + { + public: + VULKAN_HPP_CONSTEXPR DescriptorSet() + : m_descriptorSet(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR DescriptorSet( std::nullptr_t ) + : m_descriptorSet(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorSet( VkDescriptorSet descriptorSet ) + : m_descriptorSet( descriptorSet ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + DescriptorSet & operator=(VkDescriptorSet descriptorSet) + { + m_descriptorSet = descriptorSet; + return *this; + } +#endif + + DescriptorSet & operator=( std::nullptr_t ) + { + m_descriptorSet = VK_NULL_HANDLE; + return *this; + } + + bool operator==( DescriptorSet const & rhs ) const + { + return m_descriptorSet == rhs.m_descriptorSet; + } + + bool operator!=(DescriptorSet const & rhs ) const + { + return m_descriptorSet != rhs.m_descriptorSet; + } + + bool operator<(DescriptorSet const & rhs ) const + { + return m_descriptorSet < rhs.m_descriptorSet; + } + + + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorSet() const + { + return m_descriptorSet; + } + + explicit operator bool() const + { + return m_descriptorSet != VK_NULL_HANDLE; + } + + bool operator!() const + { + return m_descriptorSet == VK_NULL_HANDLE; + } + + private: + VkDescriptorSet m_descriptorSet; + }; + + static_assert( sizeof( DescriptorSet ) == sizeof( VkDescriptorSet ), "handle and wrapper have different size!" 
); + + class DescriptorSetLayout + { + public: + VULKAN_HPP_CONSTEXPR DescriptorSetLayout() + : m_descriptorSetLayout(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR DescriptorSetLayout( std::nullptr_t ) + : m_descriptorSetLayout(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorSetLayout( VkDescriptorSetLayout descriptorSetLayout ) + : m_descriptorSetLayout( descriptorSetLayout ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + DescriptorSetLayout & operator=(VkDescriptorSetLayout descriptorSetLayout) + { + m_descriptorSetLayout = descriptorSetLayout; + return *this; + } +#endif + + DescriptorSetLayout & operator=( std::nullptr_t ) + { + m_descriptorSetLayout = VK_NULL_HANDLE; + return *this; + } + + bool operator==( DescriptorSetLayout const & rhs ) const + { + return m_descriptorSetLayout == rhs.m_descriptorSetLayout; + } + + bool operator!=(DescriptorSetLayout const & rhs ) const + { + return m_descriptorSetLayout != rhs.m_descriptorSetLayout; + } + + bool operator<(DescriptorSetLayout const & rhs ) const + { + return m_descriptorSetLayout < rhs.m_descriptorSetLayout; + } + + + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorSetLayout() const + { + return m_descriptorSetLayout; + } + + explicit operator bool() const + { + return m_descriptorSetLayout != VK_NULL_HANDLE; + } + + bool operator!() const + { + return m_descriptorSetLayout == VK_NULL_HANDLE; + } + + private: + VkDescriptorSetLayout m_descriptorSetLayout; + }; + + static_assert( sizeof( DescriptorSetLayout ) == sizeof( VkDescriptorSetLayout ), "handle and wrapper have different size!" ); + + class DescriptorPool + { + public: + VULKAN_HPP_CONSTEXPR DescriptorPool() + : m_descriptorPool(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR DescriptorPool( std::nullptr_t ) + : m_descriptorPool(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorPool( VkDescriptorPool descriptorPool ) + : m_descriptorPool( descriptorPool ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + DescriptorPool & operator=(VkDescriptorPool descriptorPool) + { + m_descriptorPool = descriptorPool; + return *this; + } +#endif + + DescriptorPool & operator=( std::nullptr_t ) + { + m_descriptorPool = VK_NULL_HANDLE; + return *this; + } + + bool operator==( DescriptorPool const & rhs ) const + { + return m_descriptorPool == rhs.m_descriptorPool; + } + + bool operator!=(DescriptorPool const & rhs ) const + { + return m_descriptorPool != rhs.m_descriptorPool; + } + + bool operator<(DescriptorPool const & rhs ) const + { + return m_descriptorPool < rhs.m_descriptorPool; + } + + + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorPool() const + { + return m_descriptorPool; + } + + explicit operator bool() const + { + return m_descriptorPool != VK_NULL_HANDLE; + } + + bool operator!() const + { + return m_descriptorPool == VK_NULL_HANDLE; + } + + private: + VkDescriptorPool m_descriptorPool; + }; + + static_assert( sizeof( DescriptorPool ) == sizeof( VkDescriptorPool ), "handle and wrapper have different size!" 
); + + class Fence + { + public: + VULKAN_HPP_CONSTEXPR Fence() + : m_fence(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR Fence( std::nullptr_t ) + : m_fence(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT Fence( VkFence fence ) + : m_fence( fence ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + Fence & operator=(VkFence fence) + { + m_fence = fence; + return *this; + } +#endif + + Fence & operator=( std::nullptr_t ) + { + m_fence = VK_NULL_HANDLE; + return *this; + } + + bool operator==( Fence const & rhs ) const + { + return m_fence == rhs.m_fence; + } + + bool operator!=(Fence const & rhs ) const + { + return m_fence != rhs.m_fence; + } + + bool operator<(Fence const & rhs ) const + { + return m_fence < rhs.m_fence; + } + + + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkFence() const + { + return m_fence; + } + + explicit operator bool() const + { + return m_fence != VK_NULL_HANDLE; + } + + bool operator!() const + { + return m_fence == VK_NULL_HANDLE; + } + + private: + VkFence m_fence; + }; + + static_assert( sizeof( Fence ) == sizeof( VkFence ), "handle and wrapper have different size!" ); + + class Semaphore + { + public: + VULKAN_HPP_CONSTEXPR Semaphore() + : m_semaphore(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR Semaphore( std::nullptr_t ) + : m_semaphore(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT Semaphore( VkSemaphore semaphore ) + : m_semaphore( semaphore ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + Semaphore & operator=(VkSemaphore semaphore) + { + m_semaphore = semaphore; + return *this; + } +#endif + + Semaphore & operator=( std::nullptr_t ) + { + m_semaphore = VK_NULL_HANDLE; + return *this; + } + + bool operator==( Semaphore const & rhs ) const + { + return m_semaphore == rhs.m_semaphore; + } + + bool operator!=(Semaphore const & rhs ) const + { + return m_semaphore != rhs.m_semaphore; + } + + bool operator<(Semaphore const & rhs ) const + { + return m_semaphore < rhs.m_semaphore; + } + + + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSemaphore() const + { + return m_semaphore; + } + + explicit operator bool() const + { + return m_semaphore != VK_NULL_HANDLE; + } + + bool operator!() const + { + return m_semaphore == VK_NULL_HANDLE; + } + + private: + VkSemaphore m_semaphore; + }; + + static_assert( sizeof( Semaphore ) == sizeof( VkSemaphore ), "handle and wrapper have different size!" ); + + class Event + { + public: + VULKAN_HPP_CONSTEXPR Event() + : m_event(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR Event( std::nullptr_t ) + : m_event(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT Event( VkEvent event ) + : m_event( event ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + Event & operator=(VkEvent event) + { + m_event = event; + return *this; + } +#endif + + Event & operator=( std::nullptr_t ) + { + m_event = VK_NULL_HANDLE; + return *this; + } + + bool operator==( Event const & rhs ) const + { + return m_event == rhs.m_event; + } + + bool operator!=(Event const & rhs ) const + { + return m_event != rhs.m_event; + } + + bool operator<(Event const & rhs ) const + { + return m_event < rhs.m_event; + } + + + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkEvent() const + { + return m_event; + } + + explicit operator bool() const + { + return m_event != VK_NULL_HANDLE; + } + + bool operator!() const + { + return m_event == VK_NULL_HANDLE; + } + + private: + VkEvent m_event; + }; + + static_assert( sizeof( Event ) == sizeof( VkEvent ), "handle and wrapper have different size!" 
); + + class QueryPool + { + public: + VULKAN_HPP_CONSTEXPR QueryPool() + : m_queryPool(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR QueryPool( std::nullptr_t ) + : m_queryPool(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT QueryPool( VkQueryPool queryPool ) + : m_queryPool( queryPool ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + QueryPool & operator=(VkQueryPool queryPool) + { + m_queryPool = queryPool; + return *this; + } +#endif + + QueryPool & operator=( std::nullptr_t ) + { + m_queryPool = VK_NULL_HANDLE; + return *this; + } + + bool operator==( QueryPool const & rhs ) const + { + return m_queryPool == rhs.m_queryPool; + } + + bool operator!=(QueryPool const & rhs ) const + { + return m_queryPool != rhs.m_queryPool; + } + + bool operator<(QueryPool const & rhs ) const + { + return m_queryPool < rhs.m_queryPool; + } + + + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkQueryPool() const + { + return m_queryPool; + } + + explicit operator bool() const + { + return m_queryPool != VK_NULL_HANDLE; + } + + bool operator!() const + { + return m_queryPool == VK_NULL_HANDLE; + } + + private: + VkQueryPool m_queryPool; + }; + + static_assert( sizeof( QueryPool ) == sizeof( VkQueryPool ), "handle and wrapper have different size!" ); + + class Framebuffer + { + public: + VULKAN_HPP_CONSTEXPR Framebuffer() + : m_framebuffer(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR Framebuffer( std::nullptr_t ) + : m_framebuffer(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT Framebuffer( VkFramebuffer framebuffer ) + : m_framebuffer( framebuffer ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + Framebuffer & operator=(VkFramebuffer framebuffer) + { + m_framebuffer = framebuffer; + return *this; + } +#endif + + Framebuffer & operator=( std::nullptr_t ) + { + m_framebuffer = VK_NULL_HANDLE; + return *this; + } + + bool operator==( Framebuffer const & rhs ) const + { + return m_framebuffer == rhs.m_framebuffer; + } + + bool operator!=(Framebuffer const & rhs ) const + { + return m_framebuffer != rhs.m_framebuffer; + } + + bool operator<(Framebuffer const & rhs ) const + { + return m_framebuffer < rhs.m_framebuffer; + } + + + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkFramebuffer() const + { + return m_framebuffer; + } + + explicit operator bool() const + { + return m_framebuffer != VK_NULL_HANDLE; + } + + bool operator!() const + { + return m_framebuffer == VK_NULL_HANDLE; + } + + private: + VkFramebuffer m_framebuffer; + }; + + static_assert( sizeof( Framebuffer ) == sizeof( VkFramebuffer ), "handle and wrapper have different size!" 
); + + class RenderPass + { + public: + VULKAN_HPP_CONSTEXPR RenderPass() + : m_renderPass(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR RenderPass( std::nullptr_t ) + : m_renderPass(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT RenderPass( VkRenderPass renderPass ) + : m_renderPass( renderPass ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + RenderPass & operator=(VkRenderPass renderPass) + { + m_renderPass = renderPass; + return *this; + } +#endif + + RenderPass & operator=( std::nullptr_t ) + { + m_renderPass = VK_NULL_HANDLE; + return *this; + } + + bool operator==( RenderPass const & rhs ) const + { + return m_renderPass == rhs.m_renderPass; + } + + bool operator!=(RenderPass const & rhs ) const + { + return m_renderPass != rhs.m_renderPass; + } + + bool operator<(RenderPass const & rhs ) const + { + return m_renderPass < rhs.m_renderPass; + } + + + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkRenderPass() const + { + return m_renderPass; + } + + explicit operator bool() const + { + return m_renderPass != VK_NULL_HANDLE; + } + + bool operator!() const + { + return m_renderPass == VK_NULL_HANDLE; + } + + private: + VkRenderPass m_renderPass; + }; + + static_assert( sizeof( RenderPass ) == sizeof( VkRenderPass ), "handle and wrapper have different size!" ); + + class PipelineCache + { + public: + VULKAN_HPP_CONSTEXPR PipelineCache() + : m_pipelineCache(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR PipelineCache( std::nullptr_t ) + : m_pipelineCache(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT PipelineCache( VkPipelineCache pipelineCache ) + : m_pipelineCache( pipelineCache ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + PipelineCache & operator=(VkPipelineCache pipelineCache) + { + m_pipelineCache = pipelineCache; + return *this; + } +#endif + + PipelineCache & operator=( std::nullptr_t ) + { + m_pipelineCache = VK_NULL_HANDLE; + return *this; + } + + bool operator==( PipelineCache const & rhs ) const + { + return m_pipelineCache == rhs.m_pipelineCache; + } + + bool operator!=(PipelineCache const & rhs ) const + { + return m_pipelineCache != rhs.m_pipelineCache; + } + + bool operator<(PipelineCache const & rhs ) const + { + return m_pipelineCache < rhs.m_pipelineCache; + } + + + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPipelineCache() const + { + return m_pipelineCache; + } + + explicit operator bool() const + { + return m_pipelineCache != VK_NULL_HANDLE; + } + + bool operator!() const + { + return m_pipelineCache == VK_NULL_HANDLE; + } + + private: + VkPipelineCache m_pipelineCache; + }; + + static_assert( sizeof( PipelineCache ) == sizeof( VkPipelineCache ), "handle and wrapper have different size!" 
); + + class ObjectTableNVX + { + public: + VULKAN_HPP_CONSTEXPR ObjectTableNVX() + : m_objectTableNVX(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR ObjectTableNVX( std::nullptr_t ) + : m_objectTableNVX(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT ObjectTableNVX( VkObjectTableNVX objectTableNVX ) + : m_objectTableNVX( objectTableNVX ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + ObjectTableNVX & operator=(VkObjectTableNVX objectTableNVX) + { + m_objectTableNVX = objectTableNVX; + return *this; + } +#endif + + ObjectTableNVX & operator=( std::nullptr_t ) + { + m_objectTableNVX = VK_NULL_HANDLE; + return *this; + } + + bool operator==( ObjectTableNVX const & rhs ) const + { + return m_objectTableNVX == rhs.m_objectTableNVX; + } + + bool operator!=(ObjectTableNVX const & rhs ) const + { + return m_objectTableNVX != rhs.m_objectTableNVX; + } + + bool operator<(ObjectTableNVX const & rhs ) const + { + return m_objectTableNVX < rhs.m_objectTableNVX; + } + + + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkObjectTableNVX() const + { + return m_objectTableNVX; + } + + explicit operator bool() const + { + return m_objectTableNVX != VK_NULL_HANDLE; + } + + bool operator!() const + { + return m_objectTableNVX == VK_NULL_HANDLE; + } + + private: + VkObjectTableNVX m_objectTableNVX; + }; + + static_assert( sizeof( ObjectTableNVX ) == sizeof( VkObjectTableNVX ), "handle and wrapper have different size!" ); + + class IndirectCommandsLayoutNVX + { + public: + VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutNVX() + : m_indirectCommandsLayoutNVX(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutNVX( std::nullptr_t ) + : m_indirectCommandsLayoutNVX(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT IndirectCommandsLayoutNVX( VkIndirectCommandsLayoutNVX indirectCommandsLayoutNVX ) + : m_indirectCommandsLayoutNVX( indirectCommandsLayoutNVX ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + IndirectCommandsLayoutNVX & operator=(VkIndirectCommandsLayoutNVX indirectCommandsLayoutNVX) + { + m_indirectCommandsLayoutNVX = indirectCommandsLayoutNVX; + return *this; + } +#endif + + IndirectCommandsLayoutNVX & operator=( std::nullptr_t ) + { + m_indirectCommandsLayoutNVX = VK_NULL_HANDLE; + return *this; + } + + bool operator==( IndirectCommandsLayoutNVX const & rhs ) const + { + return m_indirectCommandsLayoutNVX == rhs.m_indirectCommandsLayoutNVX; + } + + bool operator!=(IndirectCommandsLayoutNVX const & rhs ) const + { + return m_indirectCommandsLayoutNVX != rhs.m_indirectCommandsLayoutNVX; + } + + bool operator<(IndirectCommandsLayoutNVX const & rhs ) const + { + return m_indirectCommandsLayoutNVX < rhs.m_indirectCommandsLayoutNVX; + } + + + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkIndirectCommandsLayoutNVX() const + { + return m_indirectCommandsLayoutNVX; + } + + explicit operator bool() const + { + return m_indirectCommandsLayoutNVX != VK_NULL_HANDLE; + } + + bool operator!() const + { + return m_indirectCommandsLayoutNVX == VK_NULL_HANDLE; + } + + private: + VkIndirectCommandsLayoutNVX m_indirectCommandsLayoutNVX; + }; + + static_assert( sizeof( IndirectCommandsLayoutNVX ) == sizeof( VkIndirectCommandsLayoutNVX ), "handle and wrapper have different size!" 
); + + class DescriptorUpdateTemplate + { + public: + VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplate() + : m_descriptorUpdateTemplate(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplate( std::nullptr_t ) + : m_descriptorUpdateTemplate(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorUpdateTemplate( VkDescriptorUpdateTemplate descriptorUpdateTemplate ) + : m_descriptorUpdateTemplate( descriptorUpdateTemplate ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + DescriptorUpdateTemplate & operator=(VkDescriptorUpdateTemplate descriptorUpdateTemplate) + { + m_descriptorUpdateTemplate = descriptorUpdateTemplate; + return *this; + } +#endif + + DescriptorUpdateTemplate & operator=( std::nullptr_t ) + { + m_descriptorUpdateTemplate = VK_NULL_HANDLE; + return *this; + } + + bool operator==( DescriptorUpdateTemplate const & rhs ) const + { + return m_descriptorUpdateTemplate == rhs.m_descriptorUpdateTemplate; + } + + bool operator!=(DescriptorUpdateTemplate const & rhs ) const + { + return m_descriptorUpdateTemplate != rhs.m_descriptorUpdateTemplate; + } + + bool operator<(DescriptorUpdateTemplate const & rhs ) const + { + return m_descriptorUpdateTemplate < rhs.m_descriptorUpdateTemplate; + } + + + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorUpdateTemplate() const + { + return m_descriptorUpdateTemplate; + } + + explicit operator bool() const + { + return m_descriptorUpdateTemplate != VK_NULL_HANDLE; + } + + bool operator!() const + { + return m_descriptorUpdateTemplate == VK_NULL_HANDLE; + } + + private: + VkDescriptorUpdateTemplate m_descriptorUpdateTemplate; + }; + + static_assert( sizeof( DescriptorUpdateTemplate ) == sizeof( VkDescriptorUpdateTemplate ), "handle and wrapper have different size!" ); + + using DescriptorUpdateTemplateKHR = DescriptorUpdateTemplate; + + class SamplerYcbcrConversion + { + public: + VULKAN_HPP_CONSTEXPR SamplerYcbcrConversion() + : m_samplerYcbcrConversion(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR SamplerYcbcrConversion( std::nullptr_t ) + : m_samplerYcbcrConversion(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT SamplerYcbcrConversion( VkSamplerYcbcrConversion samplerYcbcrConversion ) + : m_samplerYcbcrConversion( samplerYcbcrConversion ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + SamplerYcbcrConversion & operator=(VkSamplerYcbcrConversion samplerYcbcrConversion) + { + m_samplerYcbcrConversion = samplerYcbcrConversion; + return *this; + } +#endif + + SamplerYcbcrConversion & operator=( std::nullptr_t ) + { + m_samplerYcbcrConversion = VK_NULL_HANDLE; + return *this; + } + + bool operator==( SamplerYcbcrConversion const & rhs ) const + { + return m_samplerYcbcrConversion == rhs.m_samplerYcbcrConversion; + } + + bool operator!=(SamplerYcbcrConversion const & rhs ) const + { + return m_samplerYcbcrConversion != rhs.m_samplerYcbcrConversion; + } + + bool operator<(SamplerYcbcrConversion const & rhs ) const + { + return m_samplerYcbcrConversion < rhs.m_samplerYcbcrConversion; + } + + + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSamplerYcbcrConversion() const + { + return m_samplerYcbcrConversion; + } + + explicit operator bool() const + { + return m_samplerYcbcrConversion != VK_NULL_HANDLE; + } + + bool operator!() const + { + return m_samplerYcbcrConversion == VK_NULL_HANDLE; + } + + private: + VkSamplerYcbcrConversion m_samplerYcbcrConversion; + }; + + static_assert( sizeof( SamplerYcbcrConversion ) == sizeof( VkSamplerYcbcrConversion ), "handle and wrapper have different size!" 
); + + using SamplerYcbcrConversionKHR = SamplerYcbcrConversion; + + class ValidationCacheEXT + { + public: + VULKAN_HPP_CONSTEXPR ValidationCacheEXT() + : m_validationCacheEXT(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR ValidationCacheEXT( std::nullptr_t ) + : m_validationCacheEXT(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT ValidationCacheEXT( VkValidationCacheEXT validationCacheEXT ) + : m_validationCacheEXT( validationCacheEXT ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + ValidationCacheEXT & operator=(VkValidationCacheEXT validationCacheEXT) + { + m_validationCacheEXT = validationCacheEXT; + return *this; + } +#endif + + ValidationCacheEXT & operator=( std::nullptr_t ) + { + m_validationCacheEXT = VK_NULL_HANDLE; + return *this; + } + + bool operator==( ValidationCacheEXT const & rhs ) const + { + return m_validationCacheEXT == rhs.m_validationCacheEXT; + } + + bool operator!=(ValidationCacheEXT const & rhs ) const + { + return m_validationCacheEXT != rhs.m_validationCacheEXT; + } + + bool operator<(ValidationCacheEXT const & rhs ) const + { + return m_validationCacheEXT < rhs.m_validationCacheEXT; + } + + + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkValidationCacheEXT() const + { + return m_validationCacheEXT; + } + + explicit operator bool() const + { + return m_validationCacheEXT != VK_NULL_HANDLE; + } + + bool operator!() const + { + return m_validationCacheEXT == VK_NULL_HANDLE; + } + + private: + VkValidationCacheEXT m_validationCacheEXT; + }; + + static_assert( sizeof( ValidationCacheEXT ) == sizeof( VkValidationCacheEXT ), "handle and wrapper have different size!" ); + + class AccelerationStructureNV + { + public: + VULKAN_HPP_CONSTEXPR AccelerationStructureNV() + : m_accelerationStructureNV(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR AccelerationStructureNV( std::nullptr_t ) + : m_accelerationStructureNV(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT AccelerationStructureNV( VkAccelerationStructureNV accelerationStructureNV ) + : m_accelerationStructureNV( accelerationStructureNV ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + AccelerationStructureNV & operator=(VkAccelerationStructureNV accelerationStructureNV) + { + m_accelerationStructureNV = accelerationStructureNV; + return *this; + } +#endif + + AccelerationStructureNV & operator=( std::nullptr_t ) + { + m_accelerationStructureNV = VK_NULL_HANDLE; + return *this; + } + + bool operator==( AccelerationStructureNV const & rhs ) const + { + return m_accelerationStructureNV == rhs.m_accelerationStructureNV; + } + + bool operator!=(AccelerationStructureNV const & rhs ) const + { + return m_accelerationStructureNV != rhs.m_accelerationStructureNV; + } + + bool operator<(AccelerationStructureNV const & rhs ) const + { + return m_accelerationStructureNV < rhs.m_accelerationStructureNV; + } + + + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkAccelerationStructureNV() const + { + return m_accelerationStructureNV; + } + + explicit operator bool() const + { + return m_accelerationStructureNV != VK_NULL_HANDLE; + } + + bool operator!() const + { + return m_accelerationStructureNV == VK_NULL_HANDLE; + } + + private: + VkAccelerationStructureNV m_accelerationStructureNV; + }; + + static_assert( sizeof( AccelerationStructureNV ) == sizeof( VkAccelerationStructureNV ), "handle and wrapper have different size!" 
); + + class DisplayKHR + { + public: + VULKAN_HPP_CONSTEXPR DisplayKHR() + : m_displayKHR(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR DisplayKHR( std::nullptr_t ) + : m_displayKHR(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT DisplayKHR( VkDisplayKHR displayKHR ) + : m_displayKHR( displayKHR ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + DisplayKHR & operator=(VkDisplayKHR displayKHR) + { + m_displayKHR = displayKHR; + return *this; + } +#endif + + DisplayKHR & operator=( std::nullptr_t ) + { + m_displayKHR = VK_NULL_HANDLE; + return *this; + } + + bool operator==( DisplayKHR const & rhs ) const + { + return m_displayKHR == rhs.m_displayKHR; + } + + bool operator!=(DisplayKHR const & rhs ) const + { + return m_displayKHR != rhs.m_displayKHR; + } + + bool operator<(DisplayKHR const & rhs ) const + { + return m_displayKHR < rhs.m_displayKHR; + } + + + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDisplayKHR() const + { + return m_displayKHR; + } + + explicit operator bool() const + { + return m_displayKHR != VK_NULL_HANDLE; + } + + bool operator!() const + { + return m_displayKHR == VK_NULL_HANDLE; + } + + private: + VkDisplayKHR m_displayKHR; + }; + + static_assert( sizeof( DisplayKHR ) == sizeof( VkDisplayKHR ), "handle and wrapper have different size!" ); + + class DisplayModeKHR + { + public: + VULKAN_HPP_CONSTEXPR DisplayModeKHR() + : m_displayModeKHR(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR DisplayModeKHR( std::nullptr_t ) + : m_displayModeKHR(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT DisplayModeKHR( VkDisplayModeKHR displayModeKHR ) + : m_displayModeKHR( displayModeKHR ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + DisplayModeKHR & operator=(VkDisplayModeKHR displayModeKHR) + { + m_displayModeKHR = displayModeKHR; + return *this; + } +#endif + + DisplayModeKHR & operator=( std::nullptr_t ) + { + m_displayModeKHR = VK_NULL_HANDLE; + return *this; + } + + bool operator==( DisplayModeKHR const & rhs ) const + { + return m_displayModeKHR == rhs.m_displayModeKHR; + } + + bool operator!=(DisplayModeKHR const & rhs ) const + { + return m_displayModeKHR != rhs.m_displayModeKHR; + } + + bool operator<(DisplayModeKHR const & rhs ) const + { + return m_displayModeKHR < rhs.m_displayModeKHR; + } + + + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDisplayModeKHR() const + { + return m_displayModeKHR; + } + + explicit operator bool() const + { + return m_displayModeKHR != VK_NULL_HANDLE; + } + + bool operator!() const + { + return m_displayModeKHR == VK_NULL_HANDLE; + } + + private: + VkDisplayModeKHR m_displayModeKHR; + }; + + static_assert( sizeof( DisplayModeKHR ) == sizeof( VkDisplayModeKHR ), "handle and wrapper have different size!" 
); + + class SurfaceKHR + { + public: + VULKAN_HPP_CONSTEXPR SurfaceKHR() + : m_surfaceKHR(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR SurfaceKHR( std::nullptr_t ) + : m_surfaceKHR(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT SurfaceKHR( VkSurfaceKHR surfaceKHR ) + : m_surfaceKHR( surfaceKHR ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + SurfaceKHR & operator=(VkSurfaceKHR surfaceKHR) + { + m_surfaceKHR = surfaceKHR; + return *this; + } +#endif + + SurfaceKHR & operator=( std::nullptr_t ) + { + m_surfaceKHR = VK_NULL_HANDLE; + return *this; + } + + bool operator==( SurfaceKHR const & rhs ) const + { + return m_surfaceKHR == rhs.m_surfaceKHR; + } + + bool operator!=(SurfaceKHR const & rhs ) const + { + return m_surfaceKHR != rhs.m_surfaceKHR; + } + + bool operator<(SurfaceKHR const & rhs ) const + { + return m_surfaceKHR < rhs.m_surfaceKHR; + } + + + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSurfaceKHR() const + { + return m_surfaceKHR; + } + + explicit operator bool() const + { + return m_surfaceKHR != VK_NULL_HANDLE; + } + + bool operator!() const + { + return m_surfaceKHR == VK_NULL_HANDLE; + } + + private: + VkSurfaceKHR m_surfaceKHR; + }; + + static_assert( sizeof( SurfaceKHR ) == sizeof( VkSurfaceKHR ), "handle and wrapper have different size!" ); + + class SwapchainKHR + { + public: + VULKAN_HPP_CONSTEXPR SwapchainKHR() + : m_swapchainKHR(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR SwapchainKHR( std::nullptr_t ) + : m_swapchainKHR(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT SwapchainKHR( VkSwapchainKHR swapchainKHR ) + : m_swapchainKHR( swapchainKHR ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + SwapchainKHR & operator=(VkSwapchainKHR swapchainKHR) + { + m_swapchainKHR = swapchainKHR; + return *this; + } +#endif + + SwapchainKHR & operator=( std::nullptr_t ) + { + m_swapchainKHR = VK_NULL_HANDLE; + return *this; + } + + bool operator==( SwapchainKHR const & rhs ) const + { + return m_swapchainKHR == rhs.m_swapchainKHR; + } + + bool operator!=(SwapchainKHR const & rhs ) const + { + return m_swapchainKHR != rhs.m_swapchainKHR; + } + + bool operator<(SwapchainKHR const & rhs ) const + { + return m_swapchainKHR < rhs.m_swapchainKHR; + } + + + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSwapchainKHR() const + { + return m_swapchainKHR; + } + + explicit operator bool() const + { + return m_swapchainKHR != VK_NULL_HANDLE; + } + + bool operator!() const + { + return m_swapchainKHR == VK_NULL_HANDLE; + } + + private: + VkSwapchainKHR m_swapchainKHR; + }; + + static_assert( sizeof( SwapchainKHR ) == sizeof( VkSwapchainKHR ), "handle and wrapper have different size!" 
); + + class DebugReportCallbackEXT + { + public: + VULKAN_HPP_CONSTEXPR DebugReportCallbackEXT() + : m_debugReportCallbackEXT(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR DebugReportCallbackEXT( std::nullptr_t ) + : m_debugReportCallbackEXT(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT DebugReportCallbackEXT( VkDebugReportCallbackEXT debugReportCallbackEXT ) + : m_debugReportCallbackEXT( debugReportCallbackEXT ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + DebugReportCallbackEXT & operator=(VkDebugReportCallbackEXT debugReportCallbackEXT) + { + m_debugReportCallbackEXT = debugReportCallbackEXT; + return *this; + } +#endif + + DebugReportCallbackEXT & operator=( std::nullptr_t ) + { + m_debugReportCallbackEXT = VK_NULL_HANDLE; + return *this; + } + + bool operator==( DebugReportCallbackEXT const & rhs ) const + { + return m_debugReportCallbackEXT == rhs.m_debugReportCallbackEXT; + } + + bool operator!=(DebugReportCallbackEXT const & rhs ) const + { + return m_debugReportCallbackEXT != rhs.m_debugReportCallbackEXT; + } + + bool operator<(DebugReportCallbackEXT const & rhs ) const + { + return m_debugReportCallbackEXT < rhs.m_debugReportCallbackEXT; + } + + + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDebugReportCallbackEXT() const + { + return m_debugReportCallbackEXT; + } + + explicit operator bool() const + { + return m_debugReportCallbackEXT != VK_NULL_HANDLE; + } + + bool operator!() const + { + return m_debugReportCallbackEXT == VK_NULL_HANDLE; + } + + private: + VkDebugReportCallbackEXT m_debugReportCallbackEXT; + }; + + static_assert( sizeof( DebugReportCallbackEXT ) == sizeof( VkDebugReportCallbackEXT ), "handle and wrapper have different size!" ); + + class DebugUtilsMessengerEXT + { + public: + VULKAN_HPP_CONSTEXPR DebugUtilsMessengerEXT() + : m_debugUtilsMessengerEXT(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR DebugUtilsMessengerEXT( std::nullptr_t ) + : m_debugUtilsMessengerEXT(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT DebugUtilsMessengerEXT( VkDebugUtilsMessengerEXT debugUtilsMessengerEXT ) + : m_debugUtilsMessengerEXT( debugUtilsMessengerEXT ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + DebugUtilsMessengerEXT & operator=(VkDebugUtilsMessengerEXT debugUtilsMessengerEXT) + { + m_debugUtilsMessengerEXT = debugUtilsMessengerEXT; + return *this; + } +#endif + + DebugUtilsMessengerEXT & operator=( std::nullptr_t ) + { + m_debugUtilsMessengerEXT = VK_NULL_HANDLE; + return *this; + } + + bool operator==( DebugUtilsMessengerEXT const & rhs ) const + { + return m_debugUtilsMessengerEXT == rhs.m_debugUtilsMessengerEXT; + } + + bool operator!=(DebugUtilsMessengerEXT const & rhs ) const + { + return m_debugUtilsMessengerEXT != rhs.m_debugUtilsMessengerEXT; + } + + bool operator<(DebugUtilsMessengerEXT const & rhs ) const + { + return m_debugUtilsMessengerEXT < rhs.m_debugUtilsMessengerEXT; + } + + + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDebugUtilsMessengerEXT() const + { + return m_debugUtilsMessengerEXT; + } + + explicit operator bool() const + { + return m_debugUtilsMessengerEXT != VK_NULL_HANDLE; + } + + bool operator!() const + { + return m_debugUtilsMessengerEXT == VK_NULL_HANDLE; + } + + private: + VkDebugUtilsMessengerEXT m_debugUtilsMessengerEXT; + }; + + static_assert( sizeof( DebugUtilsMessengerEXT ) == sizeof( VkDebugUtilsMessengerEXT ), "handle and wrapper have different size!" 
+  );
+
+  struct Offset2D
+  {
+    Offset2D( int32_t x_ = 0,
+              int32_t y_ = 0 )
+      : x( x_ )
+      , y( y_ )
+    {
+    }
+
+    Offset2D( VkOffset2D const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( Offset2D ) );
+    }
+
+    Offset2D& operator=( VkOffset2D const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( Offset2D ) );
+      return *this;
+    }
+    Offset2D& setX( int32_t x_ )
+    {
+      x = x_;
+      return *this;
+    }
+
+    Offset2D& setY( int32_t y_ )
+    {
+      y = y_;
+      return *this;
+    }
+
+    operator VkOffset2D const&() const
+    {
+      return *reinterpret_cast<const VkOffset2D*>(this);
+    }
+
+    operator VkOffset2D &()
+    {
+      return *reinterpret_cast<VkOffset2D*>(this);
+    }
+
+    bool operator==( Offset2D const& rhs ) const
+    {
+      return ( x == rhs.x )
+          && ( y == rhs.y );
+    }
+
+    bool operator!=( Offset2D const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    int32_t x;
+    int32_t y;
+  };
+  static_assert( sizeof( Offset2D ) == sizeof( VkOffset2D ), "struct and wrapper have different size!" );
+
+  struct Offset3D
+  {
+    Offset3D( int32_t x_ = 0,
+              int32_t y_ = 0,
+              int32_t z_ = 0 )
+      : x( x_ )
+      , y( y_ )
+      , z( z_ )
+    {
+    }
+
+    explicit Offset3D( Offset2D const& offset2D,
+                       int32_t z_ = 0 )
+      : x( offset2D.x )
+      , y( offset2D.y )
+      , z( z_ )
+    {}
+
+    Offset3D( VkOffset3D const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( Offset3D ) );
+    }
+
+    Offset3D& operator=( VkOffset3D const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( Offset3D ) );
+      return *this;
+    }
+    Offset3D& setX( int32_t x_ )
+    {
+      x = x_;
+      return *this;
+    }
+
+    Offset3D& setY( int32_t y_ )
+    {
+      y = y_;
+      return *this;
+    }
+
+    Offset3D& setZ( int32_t z_ )
+    {
+      z = z_;
+      return *this;
+    }
+
+    operator VkOffset3D const&() const
+    {
+      return *reinterpret_cast<const VkOffset3D*>(this);
+    }
+
+    operator VkOffset3D &()
+    {
+      return *reinterpret_cast<VkOffset3D*>(this);
+    }
+
+    bool operator==( Offset3D const& rhs ) const
+    {
+      return ( x == rhs.x )
+          && ( y == rhs.y )
+          && ( z == rhs.z );
+    }
+
+    bool operator!=( Offset3D const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    int32_t x;
+    int32_t y;
+    int32_t z;
+  };
+  static_assert( sizeof( Offset3D ) == sizeof( VkOffset3D ), "struct and wrapper have different size!" );
+
+  struct Extent2D
+  {
+    Extent2D( uint32_t width_ = 0,
+              uint32_t height_ = 0 )
+      : width( width_ )
+      , height( height_ )
+    {
+    }
+
+    Extent2D( VkExtent2D const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( Extent2D ) );
+    }
+
+    Extent2D& operator=( VkExtent2D const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( Extent2D ) );
+      return *this;
+    }
+    Extent2D& setWidth( uint32_t width_ )
+    {
+      width = width_;
+      return *this;
+    }
+
+    Extent2D& setHeight( uint32_t height_ )
+    {
+      height = height_;
+      return *this;
+    }
+
+    operator VkExtent2D const&() const
+    {
+      return *reinterpret_cast<const VkExtent2D*>(this);
+    }
+
+    operator VkExtent2D &()
+    {
+      return *reinterpret_cast<VkExtent2D*>(this);
+    }
+
+    bool operator==( Extent2D const& rhs ) const
+    {
+      return ( width == rhs.width )
+          && ( height == rhs.height );
+    }
+
+    bool operator!=( Extent2D const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    uint32_t width;
+    uint32_t height;
+  };
+  static_assert( sizeof( Extent2D ) == sizeof( VkExtent2D ), "struct and wrapper have different size!"
); + + struct Extent3D + { + Extent3D( uint32_t width_ = 0, + uint32_t height_ = 0, + uint32_t depth_ = 0 ) + : width( width_ ) + , height( height_ ) + , depth( depth_ ) + { + } + + explicit Extent3D( Extent2D const& extent2D, + uint32_t depth_ = 0 ) + : width( extent2D.width ) + , height( extent2D.height ) + , depth( depth_ ) + {} + + Extent3D( VkExtent3D const & rhs ) + { + memcpy( this, &rhs, sizeof( Extent3D ) ); + } + + Extent3D& operator=( VkExtent3D const & rhs ) + { + memcpy( this, &rhs, sizeof( Extent3D ) ); + return *this; + } + Extent3D& setWidth( uint32_t width_ ) + { + width = width_; + return *this; + } + + Extent3D& setHeight( uint32_t height_ ) + { + height = height_; + return *this; + } + + Extent3D& setDepth( uint32_t depth_ ) + { + depth = depth_; + return *this; + } + + operator VkExtent3D const&() const + { + return *reinterpret_cast(this); + } + + operator VkExtent3D &() + { + return *reinterpret_cast(this); + } + + bool operator==( Extent3D const& rhs ) const + { + return ( width == rhs.width ) + && ( height == rhs.height ) + && ( depth == rhs.depth ); + } + + bool operator!=( Extent3D const& rhs ) const + { + return !operator==( rhs ); + } + + uint32_t width; + uint32_t height; + uint32_t depth; + }; + static_assert( sizeof( Extent3D ) == sizeof( VkExtent3D ), "struct and wrapper have different size!" ); + + struct Viewport + { + Viewport( float x_ = 0, + float y_ = 0, + float width_ = 0, + float height_ = 0, + float minDepth_ = 0, + float maxDepth_ = 0 ) + : x( x_ ) + , y( y_ ) + , width( width_ ) + , height( height_ ) + , minDepth( minDepth_ ) + , maxDepth( maxDepth_ ) + { + } + + Viewport( VkViewport const & rhs ) + { + memcpy( this, &rhs, sizeof( Viewport ) ); + } + + Viewport& operator=( VkViewport const & rhs ) + { + memcpy( this, &rhs, sizeof( Viewport ) ); + return *this; + } + Viewport& setX( float x_ ) + { + x = x_; + return *this; + } + + Viewport& setY( float y_ ) + { + y = y_; + return *this; + } + + Viewport& setWidth( float width_ ) + { + width = width_; + return *this; + } + + Viewport& setHeight( float height_ ) + { + height = height_; + return *this; + } + + Viewport& setMinDepth( float minDepth_ ) + { + minDepth = minDepth_; + return *this; + } + + Viewport& setMaxDepth( float maxDepth_ ) + { + maxDepth = maxDepth_; + return *this; + } + + operator VkViewport const&() const + { + return *reinterpret_cast(this); + } + + operator VkViewport &() + { + return *reinterpret_cast(this); + } + + bool operator==( Viewport const& rhs ) const + { + return ( x == rhs.x ) + && ( y == rhs.y ) + && ( width == rhs.width ) + && ( height == rhs.height ) + && ( minDepth == rhs.minDepth ) + && ( maxDepth == rhs.maxDepth ); + } + + bool operator!=( Viewport const& rhs ) const + { + return !operator==( rhs ); + } + + float x; + float y; + float width; + float height; + float minDepth; + float maxDepth; + }; + static_assert( sizeof( Viewport ) == sizeof( VkViewport ), "struct and wrapper have different size!" 
); + + struct Rect2D + { + Rect2D( Offset2D offset_ = Offset2D(), + Extent2D extent_ = Extent2D() ) + : offset( offset_ ) + , extent( extent_ ) + { + } + + Rect2D( VkRect2D const & rhs ) + { + memcpy( this, &rhs, sizeof( Rect2D ) ); + } + + Rect2D& operator=( VkRect2D const & rhs ) + { + memcpy( this, &rhs, sizeof( Rect2D ) ); + return *this; + } + Rect2D& setOffset( Offset2D offset_ ) + { + offset = offset_; + return *this; + } + + Rect2D& setExtent( Extent2D extent_ ) + { + extent = extent_; + return *this; + } + + operator VkRect2D const&() const + { + return *reinterpret_cast(this); + } + + operator VkRect2D &() + { + return *reinterpret_cast(this); + } + + bool operator==( Rect2D const& rhs ) const + { + return ( offset == rhs.offset ) + && ( extent == rhs.extent ); + } + + bool operator!=( Rect2D const& rhs ) const + { + return !operator==( rhs ); + } + + Offset2D offset; + Extent2D extent; + }; + static_assert( sizeof( Rect2D ) == sizeof( VkRect2D ), "struct and wrapper have different size!" ); + + struct ClearRect + { + ClearRect( Rect2D rect_ = Rect2D(), + uint32_t baseArrayLayer_ = 0, + uint32_t layerCount_ = 0 ) + : rect( rect_ ) + , baseArrayLayer( baseArrayLayer_ ) + , layerCount( layerCount_ ) + { + } + + ClearRect( VkClearRect const & rhs ) + { + memcpy( this, &rhs, sizeof( ClearRect ) ); + } + + ClearRect& operator=( VkClearRect const & rhs ) + { + memcpy( this, &rhs, sizeof( ClearRect ) ); + return *this; + } + ClearRect& setRect( Rect2D rect_ ) + { + rect = rect_; + return *this; + } + + ClearRect& setBaseArrayLayer( uint32_t baseArrayLayer_ ) + { + baseArrayLayer = baseArrayLayer_; + return *this; + } + + ClearRect& setLayerCount( uint32_t layerCount_ ) + { + layerCount = layerCount_; + return *this; + } + + operator VkClearRect const&() const + { + return *reinterpret_cast(this); + } + + operator VkClearRect &() + { + return *reinterpret_cast(this); + } + + bool operator==( ClearRect const& rhs ) const + { + return ( rect == rhs.rect ) + && ( baseArrayLayer == rhs.baseArrayLayer ) + && ( layerCount == rhs.layerCount ); + } + + bool operator!=( ClearRect const& rhs ) const + { + return !operator==( rhs ); + } + + Rect2D rect; + uint32_t baseArrayLayer; + uint32_t layerCount; + }; + static_assert( sizeof( ClearRect ) == sizeof( VkClearRect ), "struct and wrapper have different size!" ); + + struct ExtensionProperties + { + operator VkExtensionProperties const&() const + { + return *reinterpret_cast(this); + } + + operator VkExtensionProperties &() + { + return *reinterpret_cast(this); + } + + bool operator==( ExtensionProperties const& rhs ) const + { + return ( memcmp( extensionName, rhs.extensionName, VK_MAX_EXTENSION_NAME_SIZE * sizeof( char ) ) == 0 ) + && ( specVersion == rhs.specVersion ); + } + + bool operator!=( ExtensionProperties const& rhs ) const + { + return !operator==( rhs ); + } + + char extensionName[VK_MAX_EXTENSION_NAME_SIZE]; + uint32_t specVersion; + }; + static_assert( sizeof( ExtensionProperties ) == sizeof( VkExtensionProperties ), "struct and wrapper have different size!" 
); + + struct LayerProperties + { + operator VkLayerProperties const&() const + { + return *reinterpret_cast(this); + } + + operator VkLayerProperties &() + { + return *reinterpret_cast(this); + } + + bool operator==( LayerProperties const& rhs ) const + { + return ( memcmp( layerName, rhs.layerName, VK_MAX_EXTENSION_NAME_SIZE * sizeof( char ) ) == 0 ) + && ( specVersion == rhs.specVersion ) + && ( implementationVersion == rhs.implementationVersion ) + && ( memcmp( description, rhs.description, VK_MAX_DESCRIPTION_SIZE * sizeof( char ) ) == 0 ); + } + + bool operator!=( LayerProperties const& rhs ) const + { + return !operator==( rhs ); + } + + char layerName[VK_MAX_EXTENSION_NAME_SIZE]; + uint32_t specVersion; + uint32_t implementationVersion; + char description[VK_MAX_DESCRIPTION_SIZE]; + }; + static_assert( sizeof( LayerProperties ) == sizeof( VkLayerProperties ), "struct and wrapper have different size!" ); + + struct AllocationCallbacks + { + AllocationCallbacks( void* pUserData_ = nullptr, + PFN_vkAllocationFunction pfnAllocation_ = nullptr, + PFN_vkReallocationFunction pfnReallocation_ = nullptr, + PFN_vkFreeFunction pfnFree_ = nullptr, + PFN_vkInternalAllocationNotification pfnInternalAllocation_ = nullptr, + PFN_vkInternalFreeNotification pfnInternalFree_ = nullptr ) + : pUserData( pUserData_ ) + , pfnAllocation( pfnAllocation_ ) + , pfnReallocation( pfnReallocation_ ) + , pfnFree( pfnFree_ ) + , pfnInternalAllocation( pfnInternalAllocation_ ) + , pfnInternalFree( pfnInternalFree_ ) + { + } + + AllocationCallbacks( VkAllocationCallbacks const & rhs ) + { + memcpy( this, &rhs, sizeof( AllocationCallbacks ) ); + } + + AllocationCallbacks& operator=( VkAllocationCallbacks const & rhs ) + { + memcpy( this, &rhs, sizeof( AllocationCallbacks ) ); + return *this; + } + AllocationCallbacks& setPUserData( void* pUserData_ ) + { + pUserData = pUserData_; + return *this; + } + + AllocationCallbacks& setPfnAllocation( PFN_vkAllocationFunction pfnAllocation_ ) + { + pfnAllocation = pfnAllocation_; + return *this; + } + + AllocationCallbacks& setPfnReallocation( PFN_vkReallocationFunction pfnReallocation_ ) + { + pfnReallocation = pfnReallocation_; + return *this; + } + + AllocationCallbacks& setPfnFree( PFN_vkFreeFunction pfnFree_ ) + { + pfnFree = pfnFree_; + return *this; + } + + AllocationCallbacks& setPfnInternalAllocation( PFN_vkInternalAllocationNotification pfnInternalAllocation_ ) + { + pfnInternalAllocation = pfnInternalAllocation_; + return *this; + } + + AllocationCallbacks& setPfnInternalFree( PFN_vkInternalFreeNotification pfnInternalFree_ ) + { + pfnInternalFree = pfnInternalFree_; + return *this; + } + + operator VkAllocationCallbacks const&() const + { + return *reinterpret_cast(this); + } + + operator VkAllocationCallbacks &() + { + return *reinterpret_cast(this); + } + + bool operator==( AllocationCallbacks const& rhs ) const + { + return ( pUserData == rhs.pUserData ) + && ( pfnAllocation == rhs.pfnAllocation ) + && ( pfnReallocation == rhs.pfnReallocation ) + && ( pfnFree == rhs.pfnFree ) + && ( pfnInternalAllocation == rhs.pfnInternalAllocation ) + && ( pfnInternalFree == rhs.pfnInternalFree ); + } + + bool operator!=( AllocationCallbacks const& rhs ) const + { + return !operator==( rhs ); + } + + void* pUserData; + PFN_vkAllocationFunction pfnAllocation; + PFN_vkReallocationFunction pfnReallocation; + PFN_vkFreeFunction pfnFree; + PFN_vkInternalAllocationNotification pfnInternalAllocation; + PFN_vkInternalFreeNotification pfnInternalFree; + }; + static_assert( sizeof( 
AllocationCallbacks ) == sizeof( VkAllocationCallbacks ), "struct and wrapper have different size!" ); + + struct MemoryRequirements + { + operator VkMemoryRequirements const&() const + { + return *reinterpret_cast(this); + } + + operator VkMemoryRequirements &() + { + return *reinterpret_cast(this); + } + + bool operator==( MemoryRequirements const& rhs ) const + { + return ( size == rhs.size ) + && ( alignment == rhs.alignment ) + && ( memoryTypeBits == rhs.memoryTypeBits ); + } + + bool operator!=( MemoryRequirements const& rhs ) const + { + return !operator==( rhs ); + } + + DeviceSize size; + DeviceSize alignment; + uint32_t memoryTypeBits; + }; + static_assert( sizeof( MemoryRequirements ) == sizeof( VkMemoryRequirements ), "struct and wrapper have different size!" ); + + struct DescriptorBufferInfo + { + DescriptorBufferInfo( Buffer buffer_ = Buffer(), + DeviceSize offset_ = 0, + DeviceSize range_ = 0 ) + : buffer( buffer_ ) + , offset( offset_ ) + , range( range_ ) + { + } + + DescriptorBufferInfo( VkDescriptorBufferInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( DescriptorBufferInfo ) ); + } + + DescriptorBufferInfo& operator=( VkDescriptorBufferInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( DescriptorBufferInfo ) ); + return *this; + } + DescriptorBufferInfo& setBuffer( Buffer buffer_ ) + { + buffer = buffer_; + return *this; + } + + DescriptorBufferInfo& setOffset( DeviceSize offset_ ) + { + offset = offset_; + return *this; + } + + DescriptorBufferInfo& setRange( DeviceSize range_ ) + { + range = range_; + return *this; + } + + operator VkDescriptorBufferInfo const&() const + { + return *reinterpret_cast(this); + } + + operator VkDescriptorBufferInfo &() + { + return *reinterpret_cast(this); + } + + bool operator==( DescriptorBufferInfo const& rhs ) const + { + return ( buffer == rhs.buffer ) + && ( offset == rhs.offset ) + && ( range == rhs.range ); + } + + bool operator!=( DescriptorBufferInfo const& rhs ) const + { + return !operator==( rhs ); + } + + Buffer buffer; + DeviceSize offset; + DeviceSize range; + }; + static_assert( sizeof( DescriptorBufferInfo ) == sizeof( VkDescriptorBufferInfo ), "struct and wrapper have different size!" ); + + struct SubresourceLayout + { + operator VkSubresourceLayout const&() const + { + return *reinterpret_cast(this); + } + + operator VkSubresourceLayout &() + { + return *reinterpret_cast(this); + } + + bool operator==( SubresourceLayout const& rhs ) const + { + return ( offset == rhs.offset ) + && ( size == rhs.size ) + && ( rowPitch == rhs.rowPitch ) + && ( arrayPitch == rhs.arrayPitch ) + && ( depthPitch == rhs.depthPitch ); + } + + bool operator!=( SubresourceLayout const& rhs ) const + { + return !operator==( rhs ); + } + + DeviceSize offset; + DeviceSize size; + DeviceSize rowPitch; + DeviceSize arrayPitch; + DeviceSize depthPitch; + }; + static_assert( sizeof( SubresourceLayout ) == sizeof( VkSubresourceLayout ), "struct and wrapper have different size!" 
); + + struct BufferCopy + { + BufferCopy( DeviceSize srcOffset_ = 0, + DeviceSize dstOffset_ = 0, + DeviceSize size_ = 0 ) + : srcOffset( srcOffset_ ) + , dstOffset( dstOffset_ ) + , size( size_ ) + { + } + + BufferCopy( VkBufferCopy const & rhs ) + { + memcpy( this, &rhs, sizeof( BufferCopy ) ); + } + + BufferCopy& operator=( VkBufferCopy const & rhs ) + { + memcpy( this, &rhs, sizeof( BufferCopy ) ); + return *this; + } + BufferCopy& setSrcOffset( DeviceSize srcOffset_ ) + { + srcOffset = srcOffset_; + return *this; + } + + BufferCopy& setDstOffset( DeviceSize dstOffset_ ) + { + dstOffset = dstOffset_; + return *this; + } + + BufferCopy& setSize( DeviceSize size_ ) + { + size = size_; + return *this; + } + + operator VkBufferCopy const&() const + { + return *reinterpret_cast(this); + } + + operator VkBufferCopy &() + { + return *reinterpret_cast(this); + } + + bool operator==( BufferCopy const& rhs ) const + { + return ( srcOffset == rhs.srcOffset ) + && ( dstOffset == rhs.dstOffset ) + && ( size == rhs.size ); + } + + bool operator!=( BufferCopy const& rhs ) const + { + return !operator==( rhs ); + } + + DeviceSize srcOffset; + DeviceSize dstOffset; + DeviceSize size; + }; + static_assert( sizeof( BufferCopy ) == sizeof( VkBufferCopy ), "struct and wrapper have different size!" ); + + struct SpecializationMapEntry + { + SpecializationMapEntry( uint32_t constantID_ = 0, + uint32_t offset_ = 0, + size_t size_ = 0 ) + : constantID( constantID_ ) + , offset( offset_ ) + , size( size_ ) + { + } + + SpecializationMapEntry( VkSpecializationMapEntry const & rhs ) + { + memcpy( this, &rhs, sizeof( SpecializationMapEntry ) ); + } + + SpecializationMapEntry& operator=( VkSpecializationMapEntry const & rhs ) + { + memcpy( this, &rhs, sizeof( SpecializationMapEntry ) ); + return *this; + } + SpecializationMapEntry& setConstantID( uint32_t constantID_ ) + { + constantID = constantID_; + return *this; + } + + SpecializationMapEntry& setOffset( uint32_t offset_ ) + { + offset = offset_; + return *this; + } + + SpecializationMapEntry& setSize( size_t size_ ) + { + size = size_; + return *this; + } + + operator VkSpecializationMapEntry const&() const + { + return *reinterpret_cast(this); + } + + operator VkSpecializationMapEntry &() + { + return *reinterpret_cast(this); + } + + bool operator==( SpecializationMapEntry const& rhs ) const + { + return ( constantID == rhs.constantID ) + && ( offset == rhs.offset ) + && ( size == rhs.size ); + } + + bool operator!=( SpecializationMapEntry const& rhs ) const + { + return !operator==( rhs ); + } + + uint32_t constantID; + uint32_t offset; + size_t size; + }; + static_assert( sizeof( SpecializationMapEntry ) == sizeof( VkSpecializationMapEntry ), "struct and wrapper have different size!" 
+  );
+
+  struct SpecializationInfo
+  {
+    SpecializationInfo( uint32_t mapEntryCount_ = 0,
+                        const SpecializationMapEntry* pMapEntries_ = nullptr,
+                        size_t dataSize_ = 0,
+                        const void* pData_ = nullptr )
+      : mapEntryCount( mapEntryCount_ )
+      , pMapEntries( pMapEntries_ )
+      , dataSize( dataSize_ )
+      , pData( pData_ )
+    {
+    }
+
+    SpecializationInfo( VkSpecializationInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( SpecializationInfo ) );
+    }
+
+    SpecializationInfo& operator=( VkSpecializationInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( SpecializationInfo ) );
+      return *this;
+    }
+    SpecializationInfo& setMapEntryCount( uint32_t mapEntryCount_ )
+    {
+      mapEntryCount = mapEntryCount_;
+      return *this;
+    }
+
+    SpecializationInfo& setPMapEntries( const SpecializationMapEntry* pMapEntries_ )
+    {
+      pMapEntries = pMapEntries_;
+      return *this;
+    }
+
+    SpecializationInfo& setDataSize( size_t dataSize_ )
+    {
+      dataSize = dataSize_;
+      return *this;
+    }
+
+    SpecializationInfo& setPData( const void* pData_ )
+    {
+      pData = pData_;
+      return *this;
+    }
+
+    operator VkSpecializationInfo const&() const
+    {
+      return *reinterpret_cast<const VkSpecializationInfo*>(this);
+    }
+
+    operator VkSpecializationInfo &()
+    {
+      return *reinterpret_cast<VkSpecializationInfo*>(this);
+    }
+
+    bool operator==( SpecializationInfo const& rhs ) const
+    {
+      return ( mapEntryCount == rhs.mapEntryCount )
+          && ( pMapEntries == rhs.pMapEntries )
+          && ( dataSize == rhs.dataSize )
+          && ( pData == rhs.pData );
+    }
+
+    bool operator!=( SpecializationInfo const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    uint32_t mapEntryCount;
+    const SpecializationMapEntry* pMapEntries;
+    size_t dataSize;
+    const void* pData;
+  };
+  static_assert( sizeof( SpecializationInfo ) == sizeof( VkSpecializationInfo ), "struct and wrapper have different size!" );
+
+  union ClearColorValue
+  {
+    ClearColorValue( const std::array<float,4>& float32_ = { {0} } )
+    {
+      memcpy( &float32, float32_.data(), 4 * sizeof( float ) );
+    }
+
+    ClearColorValue( const std::array<int32_t,4>& int32_ )
+    {
+      memcpy( &int32, int32_.data(), 4 * sizeof( int32_t ) );
+    }
+
+    ClearColorValue( const std::array<uint32_t,4>& uint32_ )
+    {
+      memcpy( &uint32, uint32_.data(), 4 * sizeof( uint32_t ) );
+    }
+
+    ClearColorValue& setFloat32( std::array<float,4> float32_ )
+    {
+      memcpy( &float32, float32_.data(), 4 * sizeof( float ) );
+      return *this;
+    }
+
+    ClearColorValue& setInt32( std::array<int32_t,4> int32_ )
+    {
+      memcpy( &int32, int32_.data(), 4 * sizeof( int32_t ) );
+      return *this;
+    }
+
+    ClearColorValue& setUint32( std::array<uint32_t,4> uint32_ )
+    {
+      memcpy( &uint32, uint32_.data(), 4 * sizeof( uint32_t ) );
+      return *this;
+    }
+
+    operator VkClearColorValue const&() const
+    {
+      return *reinterpret_cast<const VkClearColorValue*>(this);
+    }
+
+    operator VkClearColorValue &()
+    {
+      return *reinterpret_cast<VkClearColorValue*>(this);
+    }
+
+    float float32[4];
+    int32_t int32[4];
+    uint32_t uint32[4];
+  };
+
+  struct ClearDepthStencilValue
+  {
+    ClearDepthStencilValue( float depth_ = 0,
+                            uint32_t stencil_ = 0 )
+      : depth( depth_ )
+      , stencil( stencil_ )
+    {
+    }
+
+    ClearDepthStencilValue( VkClearDepthStencilValue const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ClearDepthStencilValue ) );
+    }
+
+    ClearDepthStencilValue& operator=( VkClearDepthStencilValue const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ClearDepthStencilValue ) );
+      return *this;
+    }
+    ClearDepthStencilValue& setDepth( float depth_ )
+    {
+      depth = depth_;
+      return *this;
+    }
+
+    ClearDepthStencilValue& setStencil( uint32_t stencil_ )
+    {
+      stencil = stencil_;
+      return *this;
+    }
+
+    operator VkClearDepthStencilValue const&() const
+    {
+      return *reinterpret_cast<const VkClearDepthStencilValue*>(this);
+    }
+
+    operator VkClearDepthStencilValue &()
+    {
+      return *reinterpret_cast<VkClearDepthStencilValue*>(this);
+    }
+
+    bool operator==( ClearDepthStencilValue const& rhs ) const
+    {
+      return ( depth == rhs.depth )
+          && ( stencil == rhs.stencil );
+    }
+
+    bool operator!=( ClearDepthStencilValue const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    float depth;
+    uint32_t stencil;
+  };
+  static_assert( sizeof( ClearDepthStencilValue ) == sizeof( VkClearDepthStencilValue ), "struct and wrapper have different size!"
); + + union ClearValue + { + ClearValue( ClearColorValue color_ = ClearColorValue() ) + { + color = color_; + } + + ClearValue( ClearDepthStencilValue depthStencil_ ) + { + depthStencil = depthStencil_; + } + + ClearValue& setColor( ClearColorValue color_ ) + { + color = color_; + return *this; + } + + ClearValue& setDepthStencil( ClearDepthStencilValue depthStencil_ ) + { + depthStencil = depthStencil_; + return *this; + } + + operator VkClearValue const&() const + { + return *reinterpret_cast(this); + } + + operator VkClearValue &() + { + return *reinterpret_cast(this); + } + +#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS + ClearColorValue color; + ClearDepthStencilValue depthStencil; +#else + VkClearColorValue color; + VkClearDepthStencilValue depthStencil; +#endif // VULKAN_HPP_HAS_UNRESTRICTED_UNIONS + }; + + struct PhysicalDeviceFeatures + { + PhysicalDeviceFeatures( Bool32 robustBufferAccess_ = 0, + Bool32 fullDrawIndexUint32_ = 0, + Bool32 imageCubeArray_ = 0, + Bool32 independentBlend_ = 0, + Bool32 geometryShader_ = 0, + Bool32 tessellationShader_ = 0, + Bool32 sampleRateShading_ = 0, + Bool32 dualSrcBlend_ = 0, + Bool32 logicOp_ = 0, + Bool32 multiDrawIndirect_ = 0, + Bool32 drawIndirectFirstInstance_ = 0, + Bool32 depthClamp_ = 0, + Bool32 depthBiasClamp_ = 0, + Bool32 fillModeNonSolid_ = 0, + Bool32 depthBounds_ = 0, + Bool32 wideLines_ = 0, + Bool32 largePoints_ = 0, + Bool32 alphaToOne_ = 0, + Bool32 multiViewport_ = 0, + Bool32 samplerAnisotropy_ = 0, + Bool32 textureCompressionETC2_ = 0, + Bool32 textureCompressionASTC_LDR_ = 0, + Bool32 textureCompressionBC_ = 0, + Bool32 occlusionQueryPrecise_ = 0, + Bool32 pipelineStatisticsQuery_ = 0, + Bool32 vertexPipelineStoresAndAtomics_ = 0, + Bool32 fragmentStoresAndAtomics_ = 0, + Bool32 shaderTessellationAndGeometryPointSize_ = 0, + Bool32 shaderImageGatherExtended_ = 0, + Bool32 shaderStorageImageExtendedFormats_ = 0, + Bool32 shaderStorageImageMultisample_ = 0, + Bool32 shaderStorageImageReadWithoutFormat_ = 0, + Bool32 shaderStorageImageWriteWithoutFormat_ = 0, + Bool32 shaderUniformBufferArrayDynamicIndexing_ = 0, + Bool32 shaderSampledImageArrayDynamicIndexing_ = 0, + Bool32 shaderStorageBufferArrayDynamicIndexing_ = 0, + Bool32 shaderStorageImageArrayDynamicIndexing_ = 0, + Bool32 shaderClipDistance_ = 0, + Bool32 shaderCullDistance_ = 0, + Bool32 shaderFloat64_ = 0, + Bool32 shaderInt64_ = 0, + Bool32 shaderInt16_ = 0, + Bool32 shaderResourceResidency_ = 0, + Bool32 shaderResourceMinLod_ = 0, + Bool32 sparseBinding_ = 0, + Bool32 sparseResidencyBuffer_ = 0, + Bool32 sparseResidencyImage2D_ = 0, + Bool32 sparseResidencyImage3D_ = 0, + Bool32 sparseResidency2Samples_ = 0, + Bool32 sparseResidency4Samples_ = 0, + Bool32 sparseResidency8Samples_ = 0, + Bool32 sparseResidency16Samples_ = 0, + Bool32 sparseResidencyAliased_ = 0, + Bool32 variableMultisampleRate_ = 0, + Bool32 inheritedQueries_ = 0 ) + : robustBufferAccess( robustBufferAccess_ ) + , fullDrawIndexUint32( fullDrawIndexUint32_ ) + , imageCubeArray( imageCubeArray_ ) + , independentBlend( independentBlend_ ) + , geometryShader( geometryShader_ ) + , tessellationShader( tessellationShader_ ) + , sampleRateShading( sampleRateShading_ ) + , dualSrcBlend( dualSrcBlend_ ) + , logicOp( logicOp_ ) + , multiDrawIndirect( multiDrawIndirect_ ) + , drawIndirectFirstInstance( drawIndirectFirstInstance_ ) + , depthClamp( depthClamp_ ) + , depthBiasClamp( depthBiasClamp_ ) + , fillModeNonSolid( fillModeNonSolid_ ) + , depthBounds( depthBounds_ ) + , wideLines( wideLines_ ) + , 
largePoints( largePoints_ ) + , alphaToOne( alphaToOne_ ) + , multiViewport( multiViewport_ ) + , samplerAnisotropy( samplerAnisotropy_ ) + , textureCompressionETC2( textureCompressionETC2_ ) + , textureCompressionASTC_LDR( textureCompressionASTC_LDR_ ) + , textureCompressionBC( textureCompressionBC_ ) + , occlusionQueryPrecise( occlusionQueryPrecise_ ) + , pipelineStatisticsQuery( pipelineStatisticsQuery_ ) + , vertexPipelineStoresAndAtomics( vertexPipelineStoresAndAtomics_ ) + , fragmentStoresAndAtomics( fragmentStoresAndAtomics_ ) + , shaderTessellationAndGeometryPointSize( shaderTessellationAndGeometryPointSize_ ) + , shaderImageGatherExtended( shaderImageGatherExtended_ ) + , shaderStorageImageExtendedFormats( shaderStorageImageExtendedFormats_ ) + , shaderStorageImageMultisample( shaderStorageImageMultisample_ ) + , shaderStorageImageReadWithoutFormat( shaderStorageImageReadWithoutFormat_ ) + , shaderStorageImageWriteWithoutFormat( shaderStorageImageWriteWithoutFormat_ ) + , shaderUniformBufferArrayDynamicIndexing( shaderUniformBufferArrayDynamicIndexing_ ) + , shaderSampledImageArrayDynamicIndexing( shaderSampledImageArrayDynamicIndexing_ ) + , shaderStorageBufferArrayDynamicIndexing( shaderStorageBufferArrayDynamicIndexing_ ) + , shaderStorageImageArrayDynamicIndexing( shaderStorageImageArrayDynamicIndexing_ ) + , shaderClipDistance( shaderClipDistance_ ) + , shaderCullDistance( shaderCullDistance_ ) + , shaderFloat64( shaderFloat64_ ) + , shaderInt64( shaderInt64_ ) + , shaderInt16( shaderInt16_ ) + , shaderResourceResidency( shaderResourceResidency_ ) + , shaderResourceMinLod( shaderResourceMinLod_ ) + , sparseBinding( sparseBinding_ ) + , sparseResidencyBuffer( sparseResidencyBuffer_ ) + , sparseResidencyImage2D( sparseResidencyImage2D_ ) + , sparseResidencyImage3D( sparseResidencyImage3D_ ) + , sparseResidency2Samples( sparseResidency2Samples_ ) + , sparseResidency4Samples( sparseResidency4Samples_ ) + , sparseResidency8Samples( sparseResidency8Samples_ ) + , sparseResidency16Samples( sparseResidency16Samples_ ) + , sparseResidencyAliased( sparseResidencyAliased_ ) + , variableMultisampleRate( variableMultisampleRate_ ) + , inheritedQueries( inheritedQueries_ ) + { + } + + PhysicalDeviceFeatures( VkPhysicalDeviceFeatures const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceFeatures ) ); + } + + PhysicalDeviceFeatures& operator=( VkPhysicalDeviceFeatures const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceFeatures ) ); + return *this; + } + PhysicalDeviceFeatures& setRobustBufferAccess( Bool32 robustBufferAccess_ ) + { + robustBufferAccess = robustBufferAccess_; + return *this; + } + + PhysicalDeviceFeatures& setFullDrawIndexUint32( Bool32 fullDrawIndexUint32_ ) + { + fullDrawIndexUint32 = fullDrawIndexUint32_; + return *this; + } + + PhysicalDeviceFeatures& setImageCubeArray( Bool32 imageCubeArray_ ) + { + imageCubeArray = imageCubeArray_; + return *this; + } + + PhysicalDeviceFeatures& setIndependentBlend( Bool32 independentBlend_ ) + { + independentBlend = independentBlend_; + return *this; + } + + PhysicalDeviceFeatures& setGeometryShader( Bool32 geometryShader_ ) + { + geometryShader = geometryShader_; + return *this; + } + + PhysicalDeviceFeatures& setTessellationShader( Bool32 tessellationShader_ ) + { + tessellationShader = tessellationShader_; + return *this; + } + + PhysicalDeviceFeatures& setSampleRateShading( Bool32 sampleRateShading_ ) + { + sampleRateShading = sampleRateShading_; + return *this; + } + + PhysicalDeviceFeatures& setDualSrcBlend( 
Bool32 dualSrcBlend_ ) + { + dualSrcBlend = dualSrcBlend_; + return *this; + } + + PhysicalDeviceFeatures& setLogicOp( Bool32 logicOp_ ) + { + logicOp = logicOp_; + return *this; + } + + PhysicalDeviceFeatures& setMultiDrawIndirect( Bool32 multiDrawIndirect_ ) + { + multiDrawIndirect = multiDrawIndirect_; + return *this; + } + + PhysicalDeviceFeatures& setDrawIndirectFirstInstance( Bool32 drawIndirectFirstInstance_ ) + { + drawIndirectFirstInstance = drawIndirectFirstInstance_; + return *this; + } + + PhysicalDeviceFeatures& setDepthClamp( Bool32 depthClamp_ ) + { + depthClamp = depthClamp_; + return *this; + } + + PhysicalDeviceFeatures& setDepthBiasClamp( Bool32 depthBiasClamp_ ) + { + depthBiasClamp = depthBiasClamp_; + return *this; + } + + PhysicalDeviceFeatures& setFillModeNonSolid( Bool32 fillModeNonSolid_ ) + { + fillModeNonSolid = fillModeNonSolid_; + return *this; + } + + PhysicalDeviceFeatures& setDepthBounds( Bool32 depthBounds_ ) + { + depthBounds = depthBounds_; + return *this; + } + + PhysicalDeviceFeatures& setWideLines( Bool32 wideLines_ ) + { + wideLines = wideLines_; + return *this; + } + + PhysicalDeviceFeatures& setLargePoints( Bool32 largePoints_ ) + { + largePoints = largePoints_; + return *this; + } + + PhysicalDeviceFeatures& setAlphaToOne( Bool32 alphaToOne_ ) + { + alphaToOne = alphaToOne_; + return *this; + } + + PhysicalDeviceFeatures& setMultiViewport( Bool32 multiViewport_ ) + { + multiViewport = multiViewport_; + return *this; + } + + PhysicalDeviceFeatures& setSamplerAnisotropy( Bool32 samplerAnisotropy_ ) + { + samplerAnisotropy = samplerAnisotropy_; + return *this; + } + + PhysicalDeviceFeatures& setTextureCompressionETC2( Bool32 textureCompressionETC2_ ) + { + textureCompressionETC2 = textureCompressionETC2_; + return *this; + } + + PhysicalDeviceFeatures& setTextureCompressionASTC_LDR( Bool32 textureCompressionASTC_LDR_ ) + { + textureCompressionASTC_LDR = textureCompressionASTC_LDR_; + return *this; + } + + PhysicalDeviceFeatures& setTextureCompressionBC( Bool32 textureCompressionBC_ ) + { + textureCompressionBC = textureCompressionBC_; + return *this; + } + + PhysicalDeviceFeatures& setOcclusionQueryPrecise( Bool32 occlusionQueryPrecise_ ) + { + occlusionQueryPrecise = occlusionQueryPrecise_; + return *this; + } + + PhysicalDeviceFeatures& setPipelineStatisticsQuery( Bool32 pipelineStatisticsQuery_ ) + { + pipelineStatisticsQuery = pipelineStatisticsQuery_; + return *this; + } + + PhysicalDeviceFeatures& setVertexPipelineStoresAndAtomics( Bool32 vertexPipelineStoresAndAtomics_ ) + { + vertexPipelineStoresAndAtomics = vertexPipelineStoresAndAtomics_; + return *this; + } + + PhysicalDeviceFeatures& setFragmentStoresAndAtomics( Bool32 fragmentStoresAndAtomics_ ) + { + fragmentStoresAndAtomics = fragmentStoresAndAtomics_; + return *this; + } + + PhysicalDeviceFeatures& setShaderTessellationAndGeometryPointSize( Bool32 shaderTessellationAndGeometryPointSize_ ) + { + shaderTessellationAndGeometryPointSize = shaderTessellationAndGeometryPointSize_; + return *this; + } + + PhysicalDeviceFeatures& setShaderImageGatherExtended( Bool32 shaderImageGatherExtended_ ) + { + shaderImageGatherExtended = shaderImageGatherExtended_; + return *this; + } + + PhysicalDeviceFeatures& setShaderStorageImageExtendedFormats( Bool32 shaderStorageImageExtendedFormats_ ) + { + shaderStorageImageExtendedFormats = shaderStorageImageExtendedFormats_; + return *this; + } + + PhysicalDeviceFeatures& setShaderStorageImageMultisample( Bool32 shaderStorageImageMultisample_ ) + { + 
shaderStorageImageMultisample = shaderStorageImageMultisample_; + return *this; + } + + PhysicalDeviceFeatures& setShaderStorageImageReadWithoutFormat( Bool32 shaderStorageImageReadWithoutFormat_ ) + { + shaderStorageImageReadWithoutFormat = shaderStorageImageReadWithoutFormat_; + return *this; + } + + PhysicalDeviceFeatures& setShaderStorageImageWriteWithoutFormat( Bool32 shaderStorageImageWriteWithoutFormat_ ) + { + shaderStorageImageWriteWithoutFormat = shaderStorageImageWriteWithoutFormat_; + return *this; + } + + PhysicalDeviceFeatures& setShaderUniformBufferArrayDynamicIndexing( Bool32 shaderUniformBufferArrayDynamicIndexing_ ) + { + shaderUniformBufferArrayDynamicIndexing = shaderUniformBufferArrayDynamicIndexing_; + return *this; + } + + PhysicalDeviceFeatures& setShaderSampledImageArrayDynamicIndexing( Bool32 shaderSampledImageArrayDynamicIndexing_ ) + { + shaderSampledImageArrayDynamicIndexing = shaderSampledImageArrayDynamicIndexing_; + return *this; + } + + PhysicalDeviceFeatures& setShaderStorageBufferArrayDynamicIndexing( Bool32 shaderStorageBufferArrayDynamicIndexing_ ) + { + shaderStorageBufferArrayDynamicIndexing = shaderStorageBufferArrayDynamicIndexing_; + return *this; + } + + PhysicalDeviceFeatures& setShaderStorageImageArrayDynamicIndexing( Bool32 shaderStorageImageArrayDynamicIndexing_ ) + { + shaderStorageImageArrayDynamicIndexing = shaderStorageImageArrayDynamicIndexing_; + return *this; + } + + PhysicalDeviceFeatures& setShaderClipDistance( Bool32 shaderClipDistance_ ) + { + shaderClipDistance = shaderClipDistance_; + return *this; + } + + PhysicalDeviceFeatures& setShaderCullDistance( Bool32 shaderCullDistance_ ) + { + shaderCullDistance = shaderCullDistance_; + return *this; + } + + PhysicalDeviceFeatures& setShaderFloat64( Bool32 shaderFloat64_ ) + { + shaderFloat64 = shaderFloat64_; + return *this; + } + + PhysicalDeviceFeatures& setShaderInt64( Bool32 shaderInt64_ ) + { + shaderInt64 = shaderInt64_; + return *this; + } + + PhysicalDeviceFeatures& setShaderInt16( Bool32 shaderInt16_ ) + { + shaderInt16 = shaderInt16_; + return *this; + } + + PhysicalDeviceFeatures& setShaderResourceResidency( Bool32 shaderResourceResidency_ ) + { + shaderResourceResidency = shaderResourceResidency_; + return *this; + } + + PhysicalDeviceFeatures& setShaderResourceMinLod( Bool32 shaderResourceMinLod_ ) + { + shaderResourceMinLod = shaderResourceMinLod_; + return *this; + } + + PhysicalDeviceFeatures& setSparseBinding( Bool32 sparseBinding_ ) + { + sparseBinding = sparseBinding_; + return *this; + } + + PhysicalDeviceFeatures& setSparseResidencyBuffer( Bool32 sparseResidencyBuffer_ ) + { + sparseResidencyBuffer = sparseResidencyBuffer_; + return *this; + } + + PhysicalDeviceFeatures& setSparseResidencyImage2D( Bool32 sparseResidencyImage2D_ ) + { + sparseResidencyImage2D = sparseResidencyImage2D_; + return *this; + } + + PhysicalDeviceFeatures& setSparseResidencyImage3D( Bool32 sparseResidencyImage3D_ ) + { + sparseResidencyImage3D = sparseResidencyImage3D_; + return *this; + } + + PhysicalDeviceFeatures& setSparseResidency2Samples( Bool32 sparseResidency2Samples_ ) + { + sparseResidency2Samples = sparseResidency2Samples_; + return *this; + } + + PhysicalDeviceFeatures& setSparseResidency4Samples( Bool32 sparseResidency4Samples_ ) + { + sparseResidency4Samples = sparseResidency4Samples_; + return *this; + } + + PhysicalDeviceFeatures& setSparseResidency8Samples( Bool32 sparseResidency8Samples_ ) + { + sparseResidency8Samples = sparseResidency8Samples_; + return *this; + } + + 
PhysicalDeviceFeatures& setSparseResidency16Samples( Bool32 sparseResidency16Samples_ ) + { + sparseResidency16Samples = sparseResidency16Samples_; + return *this; + } + + PhysicalDeviceFeatures& setSparseResidencyAliased( Bool32 sparseResidencyAliased_ ) + { + sparseResidencyAliased = sparseResidencyAliased_; + return *this; + } + + PhysicalDeviceFeatures& setVariableMultisampleRate( Bool32 variableMultisampleRate_ ) + { + variableMultisampleRate = variableMultisampleRate_; + return *this; + } + + PhysicalDeviceFeatures& setInheritedQueries( Bool32 inheritedQueries_ ) + { + inheritedQueries = inheritedQueries_; + return *this; + } + + operator VkPhysicalDeviceFeatures const&() const + { + return *reinterpret_cast(this); + } + + operator VkPhysicalDeviceFeatures &() + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDeviceFeatures const& rhs ) const + { + return ( robustBufferAccess == rhs.robustBufferAccess ) + && ( fullDrawIndexUint32 == rhs.fullDrawIndexUint32 ) + && ( imageCubeArray == rhs.imageCubeArray ) + && ( independentBlend == rhs.independentBlend ) + && ( geometryShader == rhs.geometryShader ) + && ( tessellationShader == rhs.tessellationShader ) + && ( sampleRateShading == rhs.sampleRateShading ) + && ( dualSrcBlend == rhs.dualSrcBlend ) + && ( logicOp == rhs.logicOp ) + && ( multiDrawIndirect == rhs.multiDrawIndirect ) + && ( drawIndirectFirstInstance == rhs.drawIndirectFirstInstance ) + && ( depthClamp == rhs.depthClamp ) + && ( depthBiasClamp == rhs.depthBiasClamp ) + && ( fillModeNonSolid == rhs.fillModeNonSolid ) + && ( depthBounds == rhs.depthBounds ) + && ( wideLines == rhs.wideLines ) + && ( largePoints == rhs.largePoints ) + && ( alphaToOne == rhs.alphaToOne ) + && ( multiViewport == rhs.multiViewport ) + && ( samplerAnisotropy == rhs.samplerAnisotropy ) + && ( textureCompressionETC2 == rhs.textureCompressionETC2 ) + && ( textureCompressionASTC_LDR == rhs.textureCompressionASTC_LDR ) + && ( textureCompressionBC == rhs.textureCompressionBC ) + && ( occlusionQueryPrecise == rhs.occlusionQueryPrecise ) + && ( pipelineStatisticsQuery == rhs.pipelineStatisticsQuery ) + && ( vertexPipelineStoresAndAtomics == rhs.vertexPipelineStoresAndAtomics ) + && ( fragmentStoresAndAtomics == rhs.fragmentStoresAndAtomics ) + && ( shaderTessellationAndGeometryPointSize == rhs.shaderTessellationAndGeometryPointSize ) + && ( shaderImageGatherExtended == rhs.shaderImageGatherExtended ) + && ( shaderStorageImageExtendedFormats == rhs.shaderStorageImageExtendedFormats ) + && ( shaderStorageImageMultisample == rhs.shaderStorageImageMultisample ) + && ( shaderStorageImageReadWithoutFormat == rhs.shaderStorageImageReadWithoutFormat ) + && ( shaderStorageImageWriteWithoutFormat == rhs.shaderStorageImageWriteWithoutFormat ) + && ( shaderUniformBufferArrayDynamicIndexing == rhs.shaderUniformBufferArrayDynamicIndexing ) + && ( shaderSampledImageArrayDynamicIndexing == rhs.shaderSampledImageArrayDynamicIndexing ) + && ( shaderStorageBufferArrayDynamicIndexing == rhs.shaderStorageBufferArrayDynamicIndexing ) + && ( shaderStorageImageArrayDynamicIndexing == rhs.shaderStorageImageArrayDynamicIndexing ) + && ( shaderClipDistance == rhs.shaderClipDistance ) + && ( shaderCullDistance == rhs.shaderCullDistance ) + && ( shaderFloat64 == rhs.shaderFloat64 ) + && ( shaderInt64 == rhs.shaderInt64 ) + && ( shaderInt16 == rhs.shaderInt16 ) + && ( shaderResourceResidency == rhs.shaderResourceResidency ) + && ( shaderResourceMinLod == rhs.shaderResourceMinLod ) + && ( sparseBinding == 
rhs.sparseBinding ) + && ( sparseResidencyBuffer == rhs.sparseResidencyBuffer ) + && ( sparseResidencyImage2D == rhs.sparseResidencyImage2D ) + && ( sparseResidencyImage3D == rhs.sparseResidencyImage3D ) + && ( sparseResidency2Samples == rhs.sparseResidency2Samples ) + && ( sparseResidency4Samples == rhs.sparseResidency4Samples ) + && ( sparseResidency8Samples == rhs.sparseResidency8Samples ) + && ( sparseResidency16Samples == rhs.sparseResidency16Samples ) + && ( sparseResidencyAliased == rhs.sparseResidencyAliased ) + && ( variableMultisampleRate == rhs.variableMultisampleRate ) + && ( inheritedQueries == rhs.inheritedQueries ); + } + + bool operator!=( PhysicalDeviceFeatures const& rhs ) const + { + return !operator==( rhs ); + } + + Bool32 robustBufferAccess; + Bool32 fullDrawIndexUint32; + Bool32 imageCubeArray; + Bool32 independentBlend; + Bool32 geometryShader; + Bool32 tessellationShader; + Bool32 sampleRateShading; + Bool32 dualSrcBlend; + Bool32 logicOp; + Bool32 multiDrawIndirect; + Bool32 drawIndirectFirstInstance; + Bool32 depthClamp; + Bool32 depthBiasClamp; + Bool32 fillModeNonSolid; + Bool32 depthBounds; + Bool32 wideLines; + Bool32 largePoints; + Bool32 alphaToOne; + Bool32 multiViewport; + Bool32 samplerAnisotropy; + Bool32 textureCompressionETC2; + Bool32 textureCompressionASTC_LDR; + Bool32 textureCompressionBC; + Bool32 occlusionQueryPrecise; + Bool32 pipelineStatisticsQuery; + Bool32 vertexPipelineStoresAndAtomics; + Bool32 fragmentStoresAndAtomics; + Bool32 shaderTessellationAndGeometryPointSize; + Bool32 shaderImageGatherExtended; + Bool32 shaderStorageImageExtendedFormats; + Bool32 shaderStorageImageMultisample; + Bool32 shaderStorageImageReadWithoutFormat; + Bool32 shaderStorageImageWriteWithoutFormat; + Bool32 shaderUniformBufferArrayDynamicIndexing; + Bool32 shaderSampledImageArrayDynamicIndexing; + Bool32 shaderStorageBufferArrayDynamicIndexing; + Bool32 shaderStorageImageArrayDynamicIndexing; + Bool32 shaderClipDistance; + Bool32 shaderCullDistance; + Bool32 shaderFloat64; + Bool32 shaderInt64; + Bool32 shaderInt16; + Bool32 shaderResourceResidency; + Bool32 shaderResourceMinLod; + Bool32 sparseBinding; + Bool32 sparseResidencyBuffer; + Bool32 sparseResidencyImage2D; + Bool32 sparseResidencyImage3D; + Bool32 sparseResidency2Samples; + Bool32 sparseResidency4Samples; + Bool32 sparseResidency8Samples; + Bool32 sparseResidency16Samples; + Bool32 sparseResidencyAliased; + Bool32 variableMultisampleRate; + Bool32 inheritedQueries; + }; + static_assert( sizeof( PhysicalDeviceFeatures ) == sizeof( VkPhysicalDeviceFeatures ), "struct and wrapper have different size!" 
);
+
+  struct PhysicalDeviceSparseProperties
+  {
+    operator VkPhysicalDeviceSparseProperties const&() const
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceSparseProperties*>(this);
+    }
+
+    operator VkPhysicalDeviceSparseProperties &()
+    {
+      return *reinterpret_cast<VkPhysicalDeviceSparseProperties*>(this);
+    }
+
+    bool operator==( PhysicalDeviceSparseProperties const& rhs ) const
+    {
+      return ( residencyStandard2DBlockShape == rhs.residencyStandard2DBlockShape )
+          && ( residencyStandard2DMultisampleBlockShape == rhs.residencyStandard2DMultisampleBlockShape )
+          && ( residencyStandard3DBlockShape == rhs.residencyStandard3DBlockShape )
+          && ( residencyAlignedMipSize == rhs.residencyAlignedMipSize )
+          && ( residencyNonResidentStrict == rhs.residencyNonResidentStrict );
+    }
+
+    bool operator!=( PhysicalDeviceSparseProperties const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    Bool32 residencyStandard2DBlockShape;
+    Bool32 residencyStandard2DMultisampleBlockShape;
+    Bool32 residencyStandard3DBlockShape;
+    Bool32 residencyAlignedMipSize;
+    Bool32 residencyNonResidentStrict;
+  };
+  static_assert( sizeof( PhysicalDeviceSparseProperties ) == sizeof( VkPhysicalDeviceSparseProperties ), "struct and wrapper have different size!" );
+
+  struct DrawIndirectCommand
+  {
+    DrawIndirectCommand( uint32_t vertexCount_ = 0,
+                         uint32_t instanceCount_ = 0,
+                         uint32_t firstVertex_ = 0,
+                         uint32_t firstInstance_ = 0 )
+      : vertexCount( vertexCount_ )
+      , instanceCount( instanceCount_ )
+      , firstVertex( firstVertex_ )
+      , firstInstance( firstInstance_ )
+    {
+    }
+
+    DrawIndirectCommand( VkDrawIndirectCommand const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( DrawIndirectCommand ) );
+    }
+
+    DrawIndirectCommand& operator=( VkDrawIndirectCommand const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( DrawIndirectCommand ) );
+      return *this;
+    }
+    DrawIndirectCommand& setVertexCount( uint32_t vertexCount_ )
+    {
+      vertexCount = vertexCount_;
+      return *this;
+    }
+
+    DrawIndirectCommand& setInstanceCount( uint32_t instanceCount_ )
+    {
+      instanceCount = instanceCount_;
+      return *this;
+    }
+
+    DrawIndirectCommand& setFirstVertex( uint32_t firstVertex_ )
+    {
+      firstVertex = firstVertex_;
+      return *this;
+    }
+
+    DrawIndirectCommand& setFirstInstance( uint32_t firstInstance_ )
+    {
+      firstInstance = firstInstance_;
+      return *this;
+    }
+
+    operator VkDrawIndirectCommand const&() const
+    {
+      return *reinterpret_cast<const VkDrawIndirectCommand*>(this);
+    }
+
+    operator VkDrawIndirectCommand &()
+    {
+      return *reinterpret_cast<VkDrawIndirectCommand*>(this);
+    }
+
+    bool operator==( DrawIndirectCommand const& rhs ) const
+    {
+      return ( vertexCount == rhs.vertexCount )
+          && ( instanceCount == rhs.instanceCount )
+          && ( firstVertex == rhs.firstVertex )
+          && ( firstInstance == rhs.firstInstance );
+    }
+
+    bool operator!=( DrawIndirectCommand const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    uint32_t vertexCount;
+    uint32_t instanceCount;
+    uint32_t firstVertex;
+    uint32_t firstInstance;
+  };
+  static_assert( sizeof( DrawIndirectCommand ) == sizeof( VkDrawIndirectCommand ), "struct and wrapper have different size!"
); + + struct DrawIndexedIndirectCommand + { + DrawIndexedIndirectCommand( uint32_t indexCount_ = 0, + uint32_t instanceCount_ = 0, + uint32_t firstIndex_ = 0, + int32_t vertexOffset_ = 0, + uint32_t firstInstance_ = 0 ) + : indexCount( indexCount_ ) + , instanceCount( instanceCount_ ) + , firstIndex( firstIndex_ ) + , vertexOffset( vertexOffset_ ) + , firstInstance( firstInstance_ ) + { + } + + DrawIndexedIndirectCommand( VkDrawIndexedIndirectCommand const & rhs ) + { + memcpy( this, &rhs, sizeof( DrawIndexedIndirectCommand ) ); + } + + DrawIndexedIndirectCommand& operator=( VkDrawIndexedIndirectCommand const & rhs ) + { + memcpy( this, &rhs, sizeof( DrawIndexedIndirectCommand ) ); + return *this; + } + DrawIndexedIndirectCommand& setIndexCount( uint32_t indexCount_ ) + { + indexCount = indexCount_; + return *this; + } + + DrawIndexedIndirectCommand& setInstanceCount( uint32_t instanceCount_ ) + { + instanceCount = instanceCount_; + return *this; + } + + DrawIndexedIndirectCommand& setFirstIndex( uint32_t firstIndex_ ) + { + firstIndex = firstIndex_; + return *this; + } + + DrawIndexedIndirectCommand& setVertexOffset( int32_t vertexOffset_ ) + { + vertexOffset = vertexOffset_; + return *this; + } + + DrawIndexedIndirectCommand& setFirstInstance( uint32_t firstInstance_ ) + { + firstInstance = firstInstance_; + return *this; + } + + operator VkDrawIndexedIndirectCommand const&() const + { + return *reinterpret_cast(this); + } + + operator VkDrawIndexedIndirectCommand &() + { + return *reinterpret_cast(this); + } + + bool operator==( DrawIndexedIndirectCommand const& rhs ) const + { + return ( indexCount == rhs.indexCount ) + && ( instanceCount == rhs.instanceCount ) + && ( firstIndex == rhs.firstIndex ) + && ( vertexOffset == rhs.vertexOffset ) + && ( firstInstance == rhs.firstInstance ); + } + + bool operator!=( DrawIndexedIndirectCommand const& rhs ) const + { + return !operator==( rhs ); + } + + uint32_t indexCount; + uint32_t instanceCount; + uint32_t firstIndex; + int32_t vertexOffset; + uint32_t firstInstance; + }; + static_assert( sizeof( DrawIndexedIndirectCommand ) == sizeof( VkDrawIndexedIndirectCommand ), "struct and wrapper have different size!" ); + + struct DispatchIndirectCommand + { + DispatchIndirectCommand( uint32_t x_ = 0, + uint32_t y_ = 0, + uint32_t z_ = 0 ) + : x( x_ ) + , y( y_ ) + , z( z_ ) + { + } + + DispatchIndirectCommand( VkDispatchIndirectCommand const & rhs ) + { + memcpy( this, &rhs, sizeof( DispatchIndirectCommand ) ); + } + + DispatchIndirectCommand& operator=( VkDispatchIndirectCommand const & rhs ) + { + memcpy( this, &rhs, sizeof( DispatchIndirectCommand ) ); + return *this; + } + DispatchIndirectCommand& setX( uint32_t x_ ) + { + x = x_; + return *this; + } + + DispatchIndirectCommand& setY( uint32_t y_ ) + { + y = y_; + return *this; + } + + DispatchIndirectCommand& setZ( uint32_t z_ ) + { + z = z_; + return *this; + } + + operator VkDispatchIndirectCommand const&() const + { + return *reinterpret_cast(this); + } + + operator VkDispatchIndirectCommand &() + { + return *reinterpret_cast(this); + } + + bool operator==( DispatchIndirectCommand const& rhs ) const + { + return ( x == rhs.x ) + && ( y == rhs.y ) + && ( z == rhs.z ); + } + + bool operator!=( DispatchIndirectCommand const& rhs ) const + { + return !operator==( rhs ); + } + + uint32_t x; + uint32_t y; + uint32_t z; + }; + static_assert( sizeof( DispatchIndirectCommand ) == sizeof( VkDispatchIndirectCommand ), "struct and wrapper have different size!" 
);
+
+  struct DisplayPlanePropertiesKHR
+  {
+    operator VkDisplayPlanePropertiesKHR const&() const
+    {
+      return *reinterpret_cast<const VkDisplayPlanePropertiesKHR*>(this);
+    }
+
+    operator VkDisplayPlanePropertiesKHR &()
+    {
+      return *reinterpret_cast<VkDisplayPlanePropertiesKHR*>(this);
+    }
+
+    bool operator==( DisplayPlanePropertiesKHR const& rhs ) const
+    {
+      return ( currentDisplay == rhs.currentDisplay )
+          && ( currentStackIndex == rhs.currentStackIndex );
+    }
+
+    bool operator!=( DisplayPlanePropertiesKHR const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    DisplayKHR currentDisplay;
+    uint32_t currentStackIndex;
+  };
+  static_assert( sizeof( DisplayPlanePropertiesKHR ) == sizeof( VkDisplayPlanePropertiesKHR ), "struct and wrapper have different size!" );
+
+  struct DisplayModeParametersKHR
+  {
+    DisplayModeParametersKHR( Extent2D visibleRegion_ = Extent2D(),
+                              uint32_t refreshRate_ = 0 )
+      : visibleRegion( visibleRegion_ )
+      , refreshRate( refreshRate_ )
+    {
+    }
+
+    DisplayModeParametersKHR( VkDisplayModeParametersKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( DisplayModeParametersKHR ) );
+    }
+
+    DisplayModeParametersKHR& operator=( VkDisplayModeParametersKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( DisplayModeParametersKHR ) );
+      return *this;
+    }
+    DisplayModeParametersKHR& setVisibleRegion( Extent2D visibleRegion_ )
+    {
+      visibleRegion = visibleRegion_;
+      return *this;
+    }
+
+    DisplayModeParametersKHR& setRefreshRate( uint32_t refreshRate_ )
+    {
+      refreshRate = refreshRate_;
+      return *this;
+    }
+
+    operator VkDisplayModeParametersKHR const&() const
+    {
+      return *reinterpret_cast<const VkDisplayModeParametersKHR*>(this);
+    }
+
+    operator VkDisplayModeParametersKHR &()
+    {
+      return *reinterpret_cast<VkDisplayModeParametersKHR*>(this);
+    }
+
+    bool operator==( DisplayModeParametersKHR const& rhs ) const
+    {
+      return ( visibleRegion == rhs.visibleRegion )
+          && ( refreshRate == rhs.refreshRate );
+    }
+
+    bool operator!=( DisplayModeParametersKHR const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    Extent2D visibleRegion;
+    uint32_t refreshRate;
+  };
+  static_assert( sizeof( DisplayModeParametersKHR ) == sizeof( VkDisplayModeParametersKHR ), "struct and wrapper have different size!" );
+
+  struct DisplayModePropertiesKHR
+  {
+    operator VkDisplayModePropertiesKHR const&() const
+    {
+      return *reinterpret_cast<const VkDisplayModePropertiesKHR*>(this);
+    }
+
+    operator VkDisplayModePropertiesKHR &()
+    {
+      return *reinterpret_cast<VkDisplayModePropertiesKHR*>(this);
+    }
+
+    bool operator==( DisplayModePropertiesKHR const& rhs ) const
+    {
+      return ( displayMode == rhs.displayMode )
+          && ( parameters == rhs.parameters );
+    }
+
+    bool operator!=( DisplayModePropertiesKHR const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    DisplayModeKHR displayMode;
+    DisplayModeParametersKHR parameters;
+  };
+  static_assert( sizeof( DisplayModePropertiesKHR ) == sizeof( VkDisplayModePropertiesKHR ), "struct and wrapper have different size!"
); + + struct ConformanceVersionKHR + { + ConformanceVersionKHR( uint8_t major_ = 0, + uint8_t minor_ = 0, + uint8_t subminor_ = 0, + uint8_t patch_ = 0 ) + : major( major_ ) + , minor( minor_ ) + , subminor( subminor_ ) + , patch( patch_ ) + { + } + + ConformanceVersionKHR( VkConformanceVersionKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( ConformanceVersionKHR ) ); + } + + ConformanceVersionKHR& operator=( VkConformanceVersionKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( ConformanceVersionKHR ) ); + return *this; + } + ConformanceVersionKHR& setMajor( uint8_t major_ ) + { + major = major_; + return *this; + } + + ConformanceVersionKHR& setMinor( uint8_t minor_ ) + { + minor = minor_; + return *this; + } + + ConformanceVersionKHR& setSubminor( uint8_t subminor_ ) + { + subminor = subminor_; + return *this; + } + + ConformanceVersionKHR& setPatch( uint8_t patch_ ) + { + patch = patch_; + return *this; + } + + operator VkConformanceVersionKHR const&() const + { + return *reinterpret_cast(this); + } + + operator VkConformanceVersionKHR &() + { + return *reinterpret_cast(this); + } + + bool operator==( ConformanceVersionKHR const& rhs ) const + { + return ( major == rhs.major ) + && ( minor == rhs.minor ) + && ( subminor == rhs.subminor ) + && ( patch == rhs.patch ); + } + + bool operator!=( ConformanceVersionKHR const& rhs ) const + { + return !operator==( rhs ); + } + + uint8_t major; + uint8_t minor; + uint8_t subminor; + uint8_t patch; + }; + static_assert( sizeof( ConformanceVersionKHR ) == sizeof( VkConformanceVersionKHR ), "struct and wrapper have different size!" ); + + struct RectLayerKHR + { + RectLayerKHR( Offset2D offset_ = Offset2D(), + Extent2D extent_ = Extent2D(), + uint32_t layer_ = 0 ) + : offset( offset_ ) + , extent( extent_ ) + , layer( layer_ ) + { + } + + explicit RectLayerKHR( Rect2D const& rect2D, + uint32_t layer_ = 0 ) + : offset( rect2D.offset ) + , extent( rect2D.extent ) + , layer( layer_ ) + {} + + RectLayerKHR( VkRectLayerKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( RectLayerKHR ) ); + } + + RectLayerKHR& operator=( VkRectLayerKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( RectLayerKHR ) ); + return *this; + } + RectLayerKHR& setOffset( Offset2D offset_ ) + { + offset = offset_; + return *this; + } + + RectLayerKHR& setExtent( Extent2D extent_ ) + { + extent = extent_; + return *this; + } + + RectLayerKHR& setLayer( uint32_t layer_ ) + { + layer = layer_; + return *this; + } + + operator VkRectLayerKHR const&() const + { + return *reinterpret_cast(this); + } + + operator VkRectLayerKHR &() + { + return *reinterpret_cast(this); + } + + bool operator==( RectLayerKHR const& rhs ) const + { + return ( offset == rhs.offset ) + && ( extent == rhs.extent ) + && ( layer == rhs.layer ); + } + + bool operator!=( RectLayerKHR const& rhs ) const + { + return !operator==( rhs ); + } + + Offset2D offset; + Extent2D extent; + uint32_t layer; + }; + static_assert( sizeof( RectLayerKHR ) == sizeof( VkRectLayerKHR ), "struct and wrapper have different size!" 
); + + struct PresentRegionKHR + { + PresentRegionKHR( uint32_t rectangleCount_ = 0, + const RectLayerKHR* pRectangles_ = nullptr ) + : rectangleCount( rectangleCount_ ) + , pRectangles( pRectangles_ ) + { + } + + PresentRegionKHR( VkPresentRegionKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( PresentRegionKHR ) ); + } + + PresentRegionKHR& operator=( VkPresentRegionKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( PresentRegionKHR ) ); + return *this; + } + PresentRegionKHR& setRectangleCount( uint32_t rectangleCount_ ) + { + rectangleCount = rectangleCount_; + return *this; + } + + PresentRegionKHR& setPRectangles( const RectLayerKHR* pRectangles_ ) + { + pRectangles = pRectangles_; + return *this; + } + + operator VkPresentRegionKHR const&() const + { + return *reinterpret_cast(this); + } + + operator VkPresentRegionKHR &() + { + return *reinterpret_cast(this); + } + + bool operator==( PresentRegionKHR const& rhs ) const + { + return ( rectangleCount == rhs.rectangleCount ) + && ( pRectangles == rhs.pRectangles ); + } + + bool operator!=( PresentRegionKHR const& rhs ) const + { + return !operator==( rhs ); + } + + uint32_t rectangleCount; + const RectLayerKHR* pRectangles; + }; + static_assert( sizeof( PresentRegionKHR ) == sizeof( VkPresentRegionKHR ), "struct and wrapper have different size!" ); + + struct XYColorEXT + { + XYColorEXT( float x_ = 0, + float y_ = 0 ) + : x( x_ ) + , y( y_ ) + { + } + + XYColorEXT( VkXYColorEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( XYColorEXT ) ); + } + + XYColorEXT& operator=( VkXYColorEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( XYColorEXT ) ); + return *this; + } + XYColorEXT& setX( float x_ ) + { + x = x_; + return *this; + } + + XYColorEXT& setY( float y_ ) + { + y = y_; + return *this; + } + + operator VkXYColorEXT const&() const + { + return *reinterpret_cast(this); + } + + operator VkXYColorEXT &() + { + return *reinterpret_cast(this); + } + + bool operator==( XYColorEXT const& rhs ) const + { + return ( x == rhs.x ) + && ( y == rhs.y ); + } + + bool operator!=( XYColorEXT const& rhs ) const + { + return !operator==( rhs ); + } + + float x; + float y; + }; + static_assert( sizeof( XYColorEXT ) == sizeof( VkXYColorEXT ), "struct and wrapper have different size!" ); + + struct RefreshCycleDurationGOOGLE + { + operator VkRefreshCycleDurationGOOGLE const&() const + { + return *reinterpret_cast(this); + } + + operator VkRefreshCycleDurationGOOGLE &() + { + return *reinterpret_cast(this); + } + + bool operator==( RefreshCycleDurationGOOGLE const& rhs ) const + { + return ( refreshDuration == rhs.refreshDuration ); + } + + bool operator!=( RefreshCycleDurationGOOGLE const& rhs ) const + { + return !operator==( rhs ); + } + + uint64_t refreshDuration; + }; + static_assert( sizeof( RefreshCycleDurationGOOGLE ) == sizeof( VkRefreshCycleDurationGOOGLE ), "struct and wrapper have different size!" 
); + + struct PastPresentationTimingGOOGLE + { + operator VkPastPresentationTimingGOOGLE const&() const + { + return *reinterpret_cast(this); + } + + operator VkPastPresentationTimingGOOGLE &() + { + return *reinterpret_cast(this); + } + + bool operator==( PastPresentationTimingGOOGLE const& rhs ) const + { + return ( presentID == rhs.presentID ) + && ( desiredPresentTime == rhs.desiredPresentTime ) + && ( actualPresentTime == rhs.actualPresentTime ) + && ( earliestPresentTime == rhs.earliestPresentTime ) + && ( presentMargin == rhs.presentMargin ); + } + + bool operator!=( PastPresentationTimingGOOGLE const& rhs ) const + { + return !operator==( rhs ); + } + + uint32_t presentID; + uint64_t desiredPresentTime; + uint64_t actualPresentTime; + uint64_t earliestPresentTime; + uint64_t presentMargin; + }; + static_assert( sizeof( PastPresentationTimingGOOGLE ) == sizeof( VkPastPresentationTimingGOOGLE ), "struct and wrapper have different size!" ); + + struct PresentTimeGOOGLE + { + PresentTimeGOOGLE( uint32_t presentID_ = 0, + uint64_t desiredPresentTime_ = 0 ) + : presentID( presentID_ ) + , desiredPresentTime( desiredPresentTime_ ) + { + } + + PresentTimeGOOGLE( VkPresentTimeGOOGLE const & rhs ) + { + memcpy( this, &rhs, sizeof( PresentTimeGOOGLE ) ); + } + + PresentTimeGOOGLE& operator=( VkPresentTimeGOOGLE const & rhs ) + { + memcpy( this, &rhs, sizeof( PresentTimeGOOGLE ) ); + return *this; + } + PresentTimeGOOGLE& setPresentID( uint32_t presentID_ ) + { + presentID = presentID_; + return *this; + } + + PresentTimeGOOGLE& setDesiredPresentTime( uint64_t desiredPresentTime_ ) + { + desiredPresentTime = desiredPresentTime_; + return *this; + } + + operator VkPresentTimeGOOGLE const&() const + { + return *reinterpret_cast(this); + } + + operator VkPresentTimeGOOGLE &() + { + return *reinterpret_cast(this); + } + + bool operator==( PresentTimeGOOGLE const& rhs ) const + { + return ( presentID == rhs.presentID ) + && ( desiredPresentTime == rhs.desiredPresentTime ); + } + + bool operator!=( PresentTimeGOOGLE const& rhs ) const + { + return !operator==( rhs ); + } + + uint32_t presentID; + uint64_t desiredPresentTime; + }; + static_assert( sizeof( PresentTimeGOOGLE ) == sizeof( VkPresentTimeGOOGLE ), "struct and wrapper have different size!" ); + + struct ViewportWScalingNV + { + ViewportWScalingNV( float xcoeff_ = 0, + float ycoeff_ = 0 ) + : xcoeff( xcoeff_ ) + , ycoeff( ycoeff_ ) + { + } + + ViewportWScalingNV( VkViewportWScalingNV const & rhs ) + { + memcpy( this, &rhs, sizeof( ViewportWScalingNV ) ); + } + + ViewportWScalingNV& operator=( VkViewportWScalingNV const & rhs ) + { + memcpy( this, &rhs, sizeof( ViewportWScalingNV ) ); + return *this; + } + ViewportWScalingNV& setXcoeff( float xcoeff_ ) + { + xcoeff = xcoeff_; + return *this; + } + + ViewportWScalingNV& setYcoeff( float ycoeff_ ) + { + ycoeff = ycoeff_; + return *this; + } + + operator VkViewportWScalingNV const&() const + { + return *reinterpret_cast(this); + } + + operator VkViewportWScalingNV &() + { + return *reinterpret_cast(this); + } + + bool operator==( ViewportWScalingNV const& rhs ) const + { + return ( xcoeff == rhs.xcoeff ) + && ( ycoeff == rhs.ycoeff ); + } + + bool operator!=( ViewportWScalingNV const& rhs ) const + { + return !operator==( rhs ); + } + + float xcoeff; + float ycoeff; + }; + static_assert( sizeof( ViewportWScalingNV ) == sizeof( VkViewportWScalingNV ), "struct and wrapper have different size!" 
); + + struct SampleLocationEXT + { + SampleLocationEXT( float x_ = 0, + float y_ = 0 ) + : x( x_ ) + , y( y_ ) + { + } + + SampleLocationEXT( VkSampleLocationEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( SampleLocationEXT ) ); + } + + SampleLocationEXT& operator=( VkSampleLocationEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( SampleLocationEXT ) ); + return *this; + } + SampleLocationEXT& setX( float x_ ) + { + x = x_; + return *this; + } + + SampleLocationEXT& setY( float y_ ) + { + y = y_; + return *this; + } + + operator VkSampleLocationEXT const&() const + { + return *reinterpret_cast(this); + } + + operator VkSampleLocationEXT &() + { + return *reinterpret_cast(this); + } + + bool operator==( SampleLocationEXT const& rhs ) const + { + return ( x == rhs.x ) + && ( y == rhs.y ); + } + + bool operator!=( SampleLocationEXT const& rhs ) const + { + return !operator==( rhs ); + } + + float x; + float y; + }; + static_assert( sizeof( SampleLocationEXT ) == sizeof( VkSampleLocationEXT ), "struct and wrapper have different size!" ); + + struct ShaderResourceUsageAMD + { + operator VkShaderResourceUsageAMD const&() const + { + return *reinterpret_cast(this); + } + + operator VkShaderResourceUsageAMD &() + { + return *reinterpret_cast(this); + } + + bool operator==( ShaderResourceUsageAMD const& rhs ) const + { + return ( numUsedVgprs == rhs.numUsedVgprs ) + && ( numUsedSgprs == rhs.numUsedSgprs ) + && ( ldsSizePerLocalWorkGroup == rhs.ldsSizePerLocalWorkGroup ) + && ( ldsUsageSizeInBytes == rhs.ldsUsageSizeInBytes ) + && ( scratchMemUsageInBytes == rhs.scratchMemUsageInBytes ); + } + + bool operator!=( ShaderResourceUsageAMD const& rhs ) const + { + return !operator==( rhs ); + } + + uint32_t numUsedVgprs; + uint32_t numUsedSgprs; + uint32_t ldsSizePerLocalWorkGroup; + size_t ldsUsageSizeInBytes; + size_t scratchMemUsageInBytes; + }; + static_assert( sizeof( ShaderResourceUsageAMD ) == sizeof( VkShaderResourceUsageAMD ), "struct and wrapper have different size!" ); + + struct VertexInputBindingDivisorDescriptionEXT + { + VertexInputBindingDivisorDescriptionEXT( uint32_t binding_ = 0, + uint32_t divisor_ = 0 ) + : binding( binding_ ) + , divisor( divisor_ ) + { + } + + VertexInputBindingDivisorDescriptionEXT( VkVertexInputBindingDivisorDescriptionEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( VertexInputBindingDivisorDescriptionEXT ) ); + } + + VertexInputBindingDivisorDescriptionEXT& operator=( VkVertexInputBindingDivisorDescriptionEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( VertexInputBindingDivisorDescriptionEXT ) ); + return *this; + } + VertexInputBindingDivisorDescriptionEXT& setBinding( uint32_t binding_ ) + { + binding = binding_; + return *this; + } + + VertexInputBindingDivisorDescriptionEXT& setDivisor( uint32_t divisor_ ) + { + divisor = divisor_; + return *this; + } + + operator VkVertexInputBindingDivisorDescriptionEXT const&() const + { + return *reinterpret_cast(this); + } + + operator VkVertexInputBindingDivisorDescriptionEXT &() + { + return *reinterpret_cast(this); + } + + bool operator==( VertexInputBindingDivisorDescriptionEXT const& rhs ) const + { + return ( binding == rhs.binding ) + && ( divisor == rhs.divisor ); + } + + bool operator!=( VertexInputBindingDivisorDescriptionEXT const& rhs ) const + { + return !operator==( rhs ); + } + + uint32_t binding; + uint32_t divisor; + }; + static_assert( sizeof( VertexInputBindingDivisorDescriptionEXT ) == sizeof( VkVertexInputBindingDivisorDescriptionEXT ), "struct and wrapper have different size!" 
); + + struct CoarseSampleLocationNV + { + CoarseSampleLocationNV( uint32_t pixelX_ = 0, + uint32_t pixelY_ = 0, + uint32_t sample_ = 0 ) + : pixelX( pixelX_ ) + , pixelY( pixelY_ ) + , sample( sample_ ) + { + } + + CoarseSampleLocationNV( VkCoarseSampleLocationNV const & rhs ) + { + memcpy( this, &rhs, sizeof( CoarseSampleLocationNV ) ); + } + + CoarseSampleLocationNV& operator=( VkCoarseSampleLocationNV const & rhs ) + { + memcpy( this, &rhs, sizeof( CoarseSampleLocationNV ) ); + return *this; + } + CoarseSampleLocationNV& setPixelX( uint32_t pixelX_ ) + { + pixelX = pixelX_; + return *this; + } + + CoarseSampleLocationNV& setPixelY( uint32_t pixelY_ ) + { + pixelY = pixelY_; + return *this; + } + + CoarseSampleLocationNV& setSample( uint32_t sample_ ) + { + sample = sample_; + return *this; + } + + operator VkCoarseSampleLocationNV const&() const + { + return *reinterpret_cast(this); + } + + operator VkCoarseSampleLocationNV &() + { + return *reinterpret_cast(this); + } + + bool operator==( CoarseSampleLocationNV const& rhs ) const + { + return ( pixelX == rhs.pixelX ) + && ( pixelY == rhs.pixelY ) + && ( sample == rhs.sample ); + } + + bool operator!=( CoarseSampleLocationNV const& rhs ) const + { + return !operator==( rhs ); + } + + uint32_t pixelX; + uint32_t pixelY; + uint32_t sample; + }; + static_assert( sizeof( CoarseSampleLocationNV ) == sizeof( VkCoarseSampleLocationNV ), "struct and wrapper have different size!" ); + + struct DrawMeshTasksIndirectCommandNV + { + DrawMeshTasksIndirectCommandNV( uint32_t taskCount_ = 0, + uint32_t firstTask_ = 0 ) + : taskCount( taskCount_ ) + , firstTask( firstTask_ ) + { + } + + DrawMeshTasksIndirectCommandNV( VkDrawMeshTasksIndirectCommandNV const & rhs ) + { + memcpy( this, &rhs, sizeof( DrawMeshTasksIndirectCommandNV ) ); + } + + DrawMeshTasksIndirectCommandNV& operator=( VkDrawMeshTasksIndirectCommandNV const & rhs ) + { + memcpy( this, &rhs, sizeof( DrawMeshTasksIndirectCommandNV ) ); + return *this; + } + DrawMeshTasksIndirectCommandNV& setTaskCount( uint32_t taskCount_ ) + { + taskCount = taskCount_; + return *this; + } + + DrawMeshTasksIndirectCommandNV& setFirstTask( uint32_t firstTask_ ) + { + firstTask = firstTask_; + return *this; + } + + operator VkDrawMeshTasksIndirectCommandNV const&() const + { + return *reinterpret_cast(this); + } + + operator VkDrawMeshTasksIndirectCommandNV &() + { + return *reinterpret_cast(this); + } + + bool operator==( DrawMeshTasksIndirectCommandNV const& rhs ) const + { + return ( taskCount == rhs.taskCount ) + && ( firstTask == rhs.firstTask ); + } + + bool operator!=( DrawMeshTasksIndirectCommandNV const& rhs ) const + { + return !operator==( rhs ); + } + + uint32_t taskCount; + uint32_t firstTask; + }; + static_assert( sizeof( DrawMeshTasksIndirectCommandNV ) == sizeof( VkDrawMeshTasksIndirectCommandNV ), "struct and wrapper have different size!" 
); + + enum class ImageLayout + { + eUndefined = VK_IMAGE_LAYOUT_UNDEFINED, + eGeneral = VK_IMAGE_LAYOUT_GENERAL, + eColorAttachmentOptimal = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, + eDepthStencilAttachmentOptimal = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, + eDepthStencilReadOnlyOptimal = VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL, + eShaderReadOnlyOptimal = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, + eTransferSrcOptimal = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, + eTransferDstOptimal = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, + ePreinitialized = VK_IMAGE_LAYOUT_PREINITIALIZED, + eDepthReadOnlyStencilAttachmentOptimal = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL, + eDepthReadOnlyStencilAttachmentOptimalKHR = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL, + eDepthAttachmentStencilReadOnlyOptimal = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL, + eDepthAttachmentStencilReadOnlyOptimalKHR = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL, + ePresentSrcKHR = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR, + eSharedPresentKHR = VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR, + eShadingRateOptimalNV = VK_IMAGE_LAYOUT_SHADING_RATE_OPTIMAL_NV + }; + + struct DescriptorImageInfo + { + DescriptorImageInfo( Sampler sampler_ = Sampler(), + ImageView imageView_ = ImageView(), + ImageLayout imageLayout_ = ImageLayout::eUndefined ) + : sampler( sampler_ ) + , imageView( imageView_ ) + , imageLayout( imageLayout_ ) + { + } + + DescriptorImageInfo( VkDescriptorImageInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( DescriptorImageInfo ) ); + } + + DescriptorImageInfo& operator=( VkDescriptorImageInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( DescriptorImageInfo ) ); + return *this; + } + DescriptorImageInfo& setSampler( Sampler sampler_ ) + { + sampler = sampler_; + return *this; + } + + DescriptorImageInfo& setImageView( ImageView imageView_ ) + { + imageView = imageView_; + return *this; + } + + DescriptorImageInfo& setImageLayout( ImageLayout imageLayout_ ) + { + imageLayout = imageLayout_; + return *this; + } + + operator VkDescriptorImageInfo const&() const + { + return *reinterpret_cast(this); + } + + operator VkDescriptorImageInfo &() + { + return *reinterpret_cast(this); + } + + bool operator==( DescriptorImageInfo const& rhs ) const + { + return ( sampler == rhs.sampler ) + && ( imageView == rhs.imageView ) + && ( imageLayout == rhs.imageLayout ); + } + + bool operator!=( DescriptorImageInfo const& rhs ) const + { + return !operator==( rhs ); + } + + Sampler sampler; + ImageView imageView; + ImageLayout imageLayout; + }; + static_assert( sizeof( DescriptorImageInfo ) == sizeof( VkDescriptorImageInfo ), "struct and wrapper have different size!" 
); + + struct AttachmentReference + { + AttachmentReference( uint32_t attachment_ = 0, + ImageLayout layout_ = ImageLayout::eUndefined ) + : attachment( attachment_ ) + , layout( layout_ ) + { + } + + AttachmentReference( VkAttachmentReference const & rhs ) + { + memcpy( this, &rhs, sizeof( AttachmentReference ) ); + } + + AttachmentReference& operator=( VkAttachmentReference const & rhs ) + { + memcpy( this, &rhs, sizeof( AttachmentReference ) ); + return *this; + } + AttachmentReference& setAttachment( uint32_t attachment_ ) + { + attachment = attachment_; + return *this; + } + + AttachmentReference& setLayout( ImageLayout layout_ ) + { + layout = layout_; + return *this; + } + + operator VkAttachmentReference const&() const + { + return *reinterpret_cast(this); + } + + operator VkAttachmentReference &() + { + return *reinterpret_cast(this); + } + + bool operator==( AttachmentReference const& rhs ) const + { + return ( attachment == rhs.attachment ) + && ( layout == rhs.layout ); + } + + bool operator!=( AttachmentReference const& rhs ) const + { + return !operator==( rhs ); + } + + uint32_t attachment; + ImageLayout layout; + }; + static_assert( sizeof( AttachmentReference ) == sizeof( VkAttachmentReference ), "struct and wrapper have different size!" ); + + enum class AttachmentLoadOp + { + eLoad = VK_ATTACHMENT_LOAD_OP_LOAD, + eClear = VK_ATTACHMENT_LOAD_OP_CLEAR, + eDontCare = VK_ATTACHMENT_LOAD_OP_DONT_CARE + }; + + enum class AttachmentStoreOp + { + eStore = VK_ATTACHMENT_STORE_OP_STORE, + eDontCare = VK_ATTACHMENT_STORE_OP_DONT_CARE + }; + + enum class ImageType + { + e1D = VK_IMAGE_TYPE_1D, + e2D = VK_IMAGE_TYPE_2D, + e3D = VK_IMAGE_TYPE_3D + }; + + enum class ImageTiling + { + eOptimal = VK_IMAGE_TILING_OPTIMAL, + eLinear = VK_IMAGE_TILING_LINEAR, + eDrmFormatModifierEXT = VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT + }; + + enum class ImageViewType + { + e1D = VK_IMAGE_VIEW_TYPE_1D, + e2D = VK_IMAGE_VIEW_TYPE_2D, + e3D = VK_IMAGE_VIEW_TYPE_3D, + eCube = VK_IMAGE_VIEW_TYPE_CUBE, + e1DArray = VK_IMAGE_VIEW_TYPE_1D_ARRAY, + e2DArray = VK_IMAGE_VIEW_TYPE_2D_ARRAY, + eCubeArray = VK_IMAGE_VIEW_TYPE_CUBE_ARRAY + }; + + enum class CommandBufferLevel + { + ePrimary = VK_COMMAND_BUFFER_LEVEL_PRIMARY, + eSecondary = VK_COMMAND_BUFFER_LEVEL_SECONDARY + }; + + enum class ComponentSwizzle + { + eIdentity = VK_COMPONENT_SWIZZLE_IDENTITY, + eZero = VK_COMPONENT_SWIZZLE_ZERO, + eOne = VK_COMPONENT_SWIZZLE_ONE, + eR = VK_COMPONENT_SWIZZLE_R, + eG = VK_COMPONENT_SWIZZLE_G, + eB = VK_COMPONENT_SWIZZLE_B, + eA = VK_COMPONENT_SWIZZLE_A + }; + + struct ComponentMapping + { + ComponentMapping( ComponentSwizzle r_ = ComponentSwizzle::eIdentity, + ComponentSwizzle g_ = ComponentSwizzle::eIdentity, + ComponentSwizzle b_ = ComponentSwizzle::eIdentity, + ComponentSwizzle a_ = ComponentSwizzle::eIdentity ) + : r( r_ ) + , g( g_ ) + , b( b_ ) + , a( a_ ) + { + } + + ComponentMapping( VkComponentMapping const & rhs ) + { + memcpy( this, &rhs, sizeof( ComponentMapping ) ); + } + + ComponentMapping& operator=( VkComponentMapping const & rhs ) + { + memcpy( this, &rhs, sizeof( ComponentMapping ) ); + return *this; + } + ComponentMapping& setR( ComponentSwizzle r_ ) + { + r = r_; + return *this; + } + + ComponentMapping& setG( ComponentSwizzle g_ ) + { + g = g_; + return *this; + } + + ComponentMapping& setB( ComponentSwizzle b_ ) + { + b = b_; + return *this; + } + + ComponentMapping& setA( ComponentSwizzle a_ ) + { + a = a_; + return *this; + } + + operator VkComponentMapping const&() const + { + return 
*reinterpret_cast(this); + } + + operator VkComponentMapping &() + { + return *reinterpret_cast(this); + } + + bool operator==( ComponentMapping const& rhs ) const + { + return ( r == rhs.r ) + && ( g == rhs.g ) + && ( b == rhs.b ) + && ( a == rhs.a ); + } + + bool operator!=( ComponentMapping const& rhs ) const + { + return !operator==( rhs ); + } + + ComponentSwizzle r; + ComponentSwizzle g; + ComponentSwizzle b; + ComponentSwizzle a; + }; + static_assert( sizeof( ComponentMapping ) == sizeof( VkComponentMapping ), "struct and wrapper have different size!" ); + + enum class DescriptorType + { + eSampler = VK_DESCRIPTOR_TYPE_SAMPLER, + eCombinedImageSampler = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, + eSampledImage = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, + eStorageImage = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, + eUniformTexelBuffer = VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER, + eStorageTexelBuffer = VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER, + eUniformBuffer = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, + eStorageBuffer = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, + eUniformBufferDynamic = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC, + eStorageBufferDynamic = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC, + eInputAttachment = VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, + eInlineUniformBlockEXT = VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT, + eAccelerationStructureNV = VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV + }; + + struct DescriptorPoolSize + { + DescriptorPoolSize( DescriptorType type_ = DescriptorType::eSampler, + uint32_t descriptorCount_ = 0 ) + : type( type_ ) + , descriptorCount( descriptorCount_ ) + { + } + + DescriptorPoolSize( VkDescriptorPoolSize const & rhs ) + { + memcpy( this, &rhs, sizeof( DescriptorPoolSize ) ); + } + + DescriptorPoolSize& operator=( VkDescriptorPoolSize const & rhs ) + { + memcpy( this, &rhs, sizeof( DescriptorPoolSize ) ); + return *this; + } + DescriptorPoolSize& setType( DescriptorType type_ ) + { + type = type_; + return *this; + } + + DescriptorPoolSize& setDescriptorCount( uint32_t descriptorCount_ ) + { + descriptorCount = descriptorCount_; + return *this; + } + + operator VkDescriptorPoolSize const&() const + { + return *reinterpret_cast(this); + } + + operator VkDescriptorPoolSize &() + { + return *reinterpret_cast(this); + } + + bool operator==( DescriptorPoolSize const& rhs ) const + { + return ( type == rhs.type ) + && ( descriptorCount == rhs.descriptorCount ); + } + + bool operator!=( DescriptorPoolSize const& rhs ) const + { + return !operator==( rhs ); + } + + DescriptorType type; + uint32_t descriptorCount; + }; + static_assert( sizeof( DescriptorPoolSize ) == sizeof( VkDescriptorPoolSize ), "struct and wrapper have different size!" 
); + + struct DescriptorUpdateTemplateEntry + { + DescriptorUpdateTemplateEntry( uint32_t dstBinding_ = 0, + uint32_t dstArrayElement_ = 0, + uint32_t descriptorCount_ = 0, + DescriptorType descriptorType_ = DescriptorType::eSampler, + size_t offset_ = 0, + size_t stride_ = 0 ) + : dstBinding( dstBinding_ ) + , dstArrayElement( dstArrayElement_ ) + , descriptorCount( descriptorCount_ ) + , descriptorType( descriptorType_ ) + , offset( offset_ ) + , stride( stride_ ) + { + } + + DescriptorUpdateTemplateEntry( VkDescriptorUpdateTemplateEntry const & rhs ) + { + memcpy( this, &rhs, sizeof( DescriptorUpdateTemplateEntry ) ); + } + + DescriptorUpdateTemplateEntry& operator=( VkDescriptorUpdateTemplateEntry const & rhs ) + { + memcpy( this, &rhs, sizeof( DescriptorUpdateTemplateEntry ) ); + return *this; + } + DescriptorUpdateTemplateEntry& setDstBinding( uint32_t dstBinding_ ) + { + dstBinding = dstBinding_; + return *this; + } + + DescriptorUpdateTemplateEntry& setDstArrayElement( uint32_t dstArrayElement_ ) + { + dstArrayElement = dstArrayElement_; + return *this; + } + + DescriptorUpdateTemplateEntry& setDescriptorCount( uint32_t descriptorCount_ ) + { + descriptorCount = descriptorCount_; + return *this; + } + + DescriptorUpdateTemplateEntry& setDescriptorType( DescriptorType descriptorType_ ) + { + descriptorType = descriptorType_; + return *this; + } + + DescriptorUpdateTemplateEntry& setOffset( size_t offset_ ) + { + offset = offset_; + return *this; + } + + DescriptorUpdateTemplateEntry& setStride( size_t stride_ ) + { + stride = stride_; + return *this; + } + + operator VkDescriptorUpdateTemplateEntry const&() const + { + return *reinterpret_cast(this); + } + + operator VkDescriptorUpdateTemplateEntry &() + { + return *reinterpret_cast(this); + } + + bool operator==( DescriptorUpdateTemplateEntry const& rhs ) const + { + return ( dstBinding == rhs.dstBinding ) + && ( dstArrayElement == rhs.dstArrayElement ) + && ( descriptorCount == rhs.descriptorCount ) + && ( descriptorType == rhs.descriptorType ) + && ( offset == rhs.offset ) + && ( stride == rhs.stride ); + } + + bool operator!=( DescriptorUpdateTemplateEntry const& rhs ) const + { + return !operator==( rhs ); + } + + uint32_t dstBinding; + uint32_t dstArrayElement; + uint32_t descriptorCount; + DescriptorType descriptorType; + size_t offset; + size_t stride; + }; + static_assert( sizeof( DescriptorUpdateTemplateEntry ) == sizeof( VkDescriptorUpdateTemplateEntry ), "struct and wrapper have different size!" 
);
+
+  using DescriptorUpdateTemplateEntryKHR = DescriptorUpdateTemplateEntry;
+
+  enum class QueryType
+  {
+    eOcclusion = VK_QUERY_TYPE_OCCLUSION,
+    ePipelineStatistics = VK_QUERY_TYPE_PIPELINE_STATISTICS,
+    eTimestamp = VK_QUERY_TYPE_TIMESTAMP,
+    eTransformFeedbackStreamEXT = VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT,
+    eAccelerationStructureCompactedSizeNV = VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_NV
+  };
+
+  enum class BorderColor
+  {
+    eFloatTransparentBlack = VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK,
+    eIntTransparentBlack = VK_BORDER_COLOR_INT_TRANSPARENT_BLACK,
+    eFloatOpaqueBlack = VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK,
+    eIntOpaqueBlack = VK_BORDER_COLOR_INT_OPAQUE_BLACK,
+    eFloatOpaqueWhite = VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE,
+    eIntOpaqueWhite = VK_BORDER_COLOR_INT_OPAQUE_WHITE
+  };
+
+  enum class PipelineBindPoint
+  {
+    eGraphics = VK_PIPELINE_BIND_POINT_GRAPHICS,
+    eCompute = VK_PIPELINE_BIND_POINT_COMPUTE,
+    eRayTracingNV = VK_PIPELINE_BIND_POINT_RAY_TRACING_NV
+  };
+
+  enum class PipelineCacheHeaderVersion
+  {
+    eOne = VK_PIPELINE_CACHE_HEADER_VERSION_ONE
+  };
+
+  enum class PrimitiveTopology
+  {
+    ePointList = VK_PRIMITIVE_TOPOLOGY_POINT_LIST,
+    eLineList = VK_PRIMITIVE_TOPOLOGY_LINE_LIST,
+    eLineStrip = VK_PRIMITIVE_TOPOLOGY_LINE_STRIP,
+    eTriangleList = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST,
+    eTriangleStrip = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP,
+    eTriangleFan = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN,
+    eLineListWithAdjacency = VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY,
+    eLineStripWithAdjacency = VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY,
+    eTriangleListWithAdjacency = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY,
+    eTriangleStripWithAdjacency = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY,
+    ePatchList = VK_PRIMITIVE_TOPOLOGY_PATCH_LIST
+  };
+
+  enum class SharingMode
+  {
+    eExclusive = VK_SHARING_MODE_EXCLUSIVE,
+    eConcurrent = VK_SHARING_MODE_CONCURRENT
+  };
+
+  enum class IndexType
+  {
+    eUint16 = VK_INDEX_TYPE_UINT16,
+    eUint32 = VK_INDEX_TYPE_UINT32,
+    eNoneNV = VK_INDEX_TYPE_NONE_NV
+  };
+
+  enum class Filter
+  {
+    eNearest = VK_FILTER_NEAREST,
+    eLinear = VK_FILTER_LINEAR,
+    eCubicIMG = VK_FILTER_CUBIC_IMG
+  };
+
+  enum class SamplerMipmapMode
+  {
+    eNearest = VK_SAMPLER_MIPMAP_MODE_NEAREST,
+    eLinear = VK_SAMPLER_MIPMAP_MODE_LINEAR
+  };
+
+  enum class SamplerAddressMode
+  {
+    eRepeat = VK_SAMPLER_ADDRESS_MODE_REPEAT,
+    eMirroredRepeat = VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT,
+    eClampToEdge = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,
+    eClampToBorder = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER,
+    eMirrorClampToEdge = VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE
+  };
+
+  enum class CompareOp
+  {
+    eNever = VK_COMPARE_OP_NEVER,
+    eLess = VK_COMPARE_OP_LESS,
+    eEqual = VK_COMPARE_OP_EQUAL,
+    eLessOrEqual = VK_COMPARE_OP_LESS_OR_EQUAL,
+    eGreater = VK_COMPARE_OP_GREATER,
+    eNotEqual = VK_COMPARE_OP_NOT_EQUAL,
+    eGreaterOrEqual = VK_COMPARE_OP_GREATER_OR_EQUAL,
+    eAlways = VK_COMPARE_OP_ALWAYS
+  };
+
+  enum class PolygonMode
+  {
+    eFill = VK_POLYGON_MODE_FILL,
+    eLine = VK_POLYGON_MODE_LINE,
+    ePoint = VK_POLYGON_MODE_POINT,
+    eFillRectangleNV = VK_POLYGON_MODE_FILL_RECTANGLE_NV
+  };
+
+  enum class CullModeFlagBits
+  {
+    eNone = VK_CULL_MODE_NONE,
+    eFront = VK_CULL_MODE_FRONT_BIT,
+    eBack = VK_CULL_MODE_BACK_BIT,
+    eFrontAndBack = VK_CULL_MODE_FRONT_AND_BACK
+  };
+
+  using CullModeFlags = Flags<CullModeFlagBits, VkCullModeFlags>;
+
+  VULKAN_HPP_INLINE CullModeFlags operator|( CullModeFlagBits bit0, CullModeFlagBits bit1 )
+  {
+    return CullModeFlags( bit0 ) | bit1;
+  }
+
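Illustrative usage of the bitmask wrapper above (an editorial sketch, not part of the vendored vulkan.hpp itself; it assumes the bindings' default vk namespace): the strongly typed CullModeFlagBits enumerators combine through the operator| just defined into a CullModeFlags value, which can be converted back to the raw VkCullModeFlags mask expected by the C API.

    // editorial sketch, assuming the default vk namespace for these bindings
    vk::CullModeFlags cullMode = vk::CullModeFlagBits::eFront | vk::CullModeFlagBits::eBack;
    VkCullModeFlags rawMask = static_cast<VkCullModeFlags>( cullMode );  // equals VK_CULL_MODE_FRONT_AND_BACK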
+  VULKAN_HPP_INLINE CullModeFlags operator~( CullModeFlagBits bits )
+  {
+    return ~( CullModeFlags( bits ) );
+  }
+
+  template <> struct FlagTraits<CullModeFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(CullModeFlagBits::eNone) | VkFlags(CullModeFlagBits::eFront) | VkFlags(CullModeFlagBits::eBack) | VkFlags(CullModeFlagBits::eFrontAndBack)
+    };
+  };
+
+  enum class FrontFace
+  {
+    eCounterClockwise = VK_FRONT_FACE_COUNTER_CLOCKWISE,
+    eClockwise = VK_FRONT_FACE_CLOCKWISE
+  };
+
+  enum class BlendFactor
+  {
+    eZero = VK_BLEND_FACTOR_ZERO,
+    eOne = VK_BLEND_FACTOR_ONE,
+    eSrcColor = VK_BLEND_FACTOR_SRC_COLOR,
+    eOneMinusSrcColor = VK_BLEND_FACTOR_ONE_MINUS_SRC_COLOR,
+    eDstColor = VK_BLEND_FACTOR_DST_COLOR,
+    eOneMinusDstColor = VK_BLEND_FACTOR_ONE_MINUS_DST_COLOR,
+    eSrcAlpha = VK_BLEND_FACTOR_SRC_ALPHA,
+    eOneMinusSrcAlpha = VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA,
+    eDstAlpha = VK_BLEND_FACTOR_DST_ALPHA,
+    eOneMinusDstAlpha = VK_BLEND_FACTOR_ONE_MINUS_DST_ALPHA,
+    eConstantColor = VK_BLEND_FACTOR_CONSTANT_COLOR,
+    eOneMinusConstantColor = VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_COLOR,
+    eConstantAlpha = VK_BLEND_FACTOR_CONSTANT_ALPHA,
+    eOneMinusConstantAlpha = VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA,
+    eSrcAlphaSaturate = VK_BLEND_FACTOR_SRC_ALPHA_SATURATE,
+    eSrc1Color = VK_BLEND_FACTOR_SRC1_COLOR,
+    eOneMinusSrc1Color = VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR,
+    eSrc1Alpha = VK_BLEND_FACTOR_SRC1_ALPHA,
+    eOneMinusSrc1Alpha = VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA
+  };
+
+  enum class BlendOp
+  {
+    eAdd = VK_BLEND_OP_ADD,
+    eSubtract = VK_BLEND_OP_SUBTRACT,
+    eReverseSubtract = VK_BLEND_OP_REVERSE_SUBTRACT,
+    eMin = VK_BLEND_OP_MIN,
+    eMax = VK_BLEND_OP_MAX,
+    eZeroEXT = VK_BLEND_OP_ZERO_EXT,
+    eSrcEXT = VK_BLEND_OP_SRC_EXT,
+    eDstEXT = VK_BLEND_OP_DST_EXT,
+    eSrcOverEXT = VK_BLEND_OP_SRC_OVER_EXT,
+    eDstOverEXT = VK_BLEND_OP_DST_OVER_EXT,
+    eSrcInEXT = VK_BLEND_OP_SRC_IN_EXT,
+    eDstInEXT = VK_BLEND_OP_DST_IN_EXT,
+    eSrcOutEXT = VK_BLEND_OP_SRC_OUT_EXT,
+    eDstOutEXT = VK_BLEND_OP_DST_OUT_EXT,
+    eSrcAtopEXT = VK_BLEND_OP_SRC_ATOP_EXT,
+    eDstAtopEXT = VK_BLEND_OP_DST_ATOP_EXT,
+    eXorEXT = VK_BLEND_OP_XOR_EXT,
+    eMultiplyEXT = VK_BLEND_OP_MULTIPLY_EXT,
+    eScreenEXT = VK_BLEND_OP_SCREEN_EXT,
+    eOverlayEXT = VK_BLEND_OP_OVERLAY_EXT,
+    eDarkenEXT = VK_BLEND_OP_DARKEN_EXT,
+    eLightenEXT = VK_BLEND_OP_LIGHTEN_EXT,
+    eColordodgeEXT = VK_BLEND_OP_COLORDODGE_EXT,
+    eColorburnEXT = VK_BLEND_OP_COLORBURN_EXT,
+    eHardlightEXT = VK_BLEND_OP_HARDLIGHT_EXT,
+    eSoftlightEXT = VK_BLEND_OP_SOFTLIGHT_EXT,
+    eDifferenceEXT = VK_BLEND_OP_DIFFERENCE_EXT,
+    eExclusionEXT = VK_BLEND_OP_EXCLUSION_EXT,
+    eInvertEXT = VK_BLEND_OP_INVERT_EXT,
+    eInvertRgbEXT = VK_BLEND_OP_INVERT_RGB_EXT,
+    eLineardodgeEXT = VK_BLEND_OP_LINEARDODGE_EXT,
+    eLinearburnEXT = VK_BLEND_OP_LINEARBURN_EXT,
+    eVividlightEXT = VK_BLEND_OP_VIVIDLIGHT_EXT,
+    eLinearlightEXT = VK_BLEND_OP_LINEARLIGHT_EXT,
+    ePinlightEXT = VK_BLEND_OP_PINLIGHT_EXT,
+    eHardmixEXT = VK_BLEND_OP_HARDMIX_EXT,
+    eHslHueEXT = VK_BLEND_OP_HSL_HUE_EXT,
+    eHslSaturationEXT = VK_BLEND_OP_HSL_SATURATION_EXT,
+    eHslColorEXT = VK_BLEND_OP_HSL_COLOR_EXT,
+    eHslLuminosityEXT = VK_BLEND_OP_HSL_LUMINOSITY_EXT,
+    ePlusEXT = VK_BLEND_OP_PLUS_EXT,
+    ePlusClampedEXT = VK_BLEND_OP_PLUS_CLAMPED_EXT,
+    ePlusClampedAlphaEXT = VK_BLEND_OP_PLUS_CLAMPED_ALPHA_EXT,
+    ePlusDarkerEXT = VK_BLEND_OP_PLUS_DARKER_EXT,
+    eMinusEXT = VK_BLEND_OP_MINUS_EXT,
+    eMinusClampedEXT = VK_BLEND_OP_MINUS_CLAMPED_EXT,
+    eContrastEXT = VK_BLEND_OP_CONTRAST_EXT,
+    eInvertOvgEXT = VK_BLEND_OP_INVERT_OVG_EXT,
+    eRedEXT = VK_BLEND_OP_RED_EXT,
eGreenEXT = VK_BLEND_OP_GREEN_EXT, + eBlueEXT = VK_BLEND_OP_BLUE_EXT + }; + + enum class StencilOp + { + eKeep = VK_STENCIL_OP_KEEP, + eZero = VK_STENCIL_OP_ZERO, + eReplace = VK_STENCIL_OP_REPLACE, + eIncrementAndClamp = VK_STENCIL_OP_INCREMENT_AND_CLAMP, + eDecrementAndClamp = VK_STENCIL_OP_DECREMENT_AND_CLAMP, + eInvert = VK_STENCIL_OP_INVERT, + eIncrementAndWrap = VK_STENCIL_OP_INCREMENT_AND_WRAP, + eDecrementAndWrap = VK_STENCIL_OP_DECREMENT_AND_WRAP + }; + + struct StencilOpState + { + StencilOpState( StencilOp failOp_ = StencilOp::eKeep, + StencilOp passOp_ = StencilOp::eKeep, + StencilOp depthFailOp_ = StencilOp::eKeep, + CompareOp compareOp_ = CompareOp::eNever, + uint32_t compareMask_ = 0, + uint32_t writeMask_ = 0, + uint32_t reference_ = 0 ) + : failOp( failOp_ ) + , passOp( passOp_ ) + , depthFailOp( depthFailOp_ ) + , compareOp( compareOp_ ) + , compareMask( compareMask_ ) + , writeMask( writeMask_ ) + , reference( reference_ ) + { + } + + StencilOpState( VkStencilOpState const & rhs ) + { + memcpy( this, &rhs, sizeof( StencilOpState ) ); + } + + StencilOpState& operator=( VkStencilOpState const & rhs ) + { + memcpy( this, &rhs, sizeof( StencilOpState ) ); + return *this; + } + StencilOpState& setFailOp( StencilOp failOp_ ) + { + failOp = failOp_; + return *this; + } + + StencilOpState& setPassOp( StencilOp passOp_ ) + { + passOp = passOp_; + return *this; + } + + StencilOpState& setDepthFailOp( StencilOp depthFailOp_ ) + { + depthFailOp = depthFailOp_; + return *this; + } + + StencilOpState& setCompareOp( CompareOp compareOp_ ) + { + compareOp = compareOp_; + return *this; + } + + StencilOpState& setCompareMask( uint32_t compareMask_ ) + { + compareMask = compareMask_; + return *this; + } + + StencilOpState& setWriteMask( uint32_t writeMask_ ) + { + writeMask = writeMask_; + return *this; + } + + StencilOpState& setReference( uint32_t reference_ ) + { + reference = reference_; + return *this; + } + + operator VkStencilOpState const&() const + { + return *reinterpret_cast(this); + } + + operator VkStencilOpState &() + { + return *reinterpret_cast(this); + } + + bool operator==( StencilOpState const& rhs ) const + { + return ( failOp == rhs.failOp ) + && ( passOp == rhs.passOp ) + && ( depthFailOp == rhs.depthFailOp ) + && ( compareOp == rhs.compareOp ) + && ( compareMask == rhs.compareMask ) + && ( writeMask == rhs.writeMask ) + && ( reference == rhs.reference ); + } + + bool operator!=( StencilOpState const& rhs ) const + { + return !operator==( rhs ); + } + + StencilOp failOp; + StencilOp passOp; + StencilOp depthFailOp; + CompareOp compareOp; + uint32_t compareMask; + uint32_t writeMask; + uint32_t reference; + }; + static_assert( sizeof( StencilOpState ) == sizeof( VkStencilOpState ), "struct and wrapper have different size!" 
); + + enum class LogicOp + { + eClear = VK_LOGIC_OP_CLEAR, + eAnd = VK_LOGIC_OP_AND, + eAndReverse = VK_LOGIC_OP_AND_REVERSE, + eCopy = VK_LOGIC_OP_COPY, + eAndInverted = VK_LOGIC_OP_AND_INVERTED, + eNoOp = VK_LOGIC_OP_NO_OP, + eXor = VK_LOGIC_OP_XOR, + eOr = VK_LOGIC_OP_OR, + eNor = VK_LOGIC_OP_NOR, + eEquivalent = VK_LOGIC_OP_EQUIVALENT, + eInvert = VK_LOGIC_OP_INVERT, + eOrReverse = VK_LOGIC_OP_OR_REVERSE, + eCopyInverted = VK_LOGIC_OP_COPY_INVERTED, + eOrInverted = VK_LOGIC_OP_OR_INVERTED, + eNand = VK_LOGIC_OP_NAND, + eSet = VK_LOGIC_OP_SET + }; + + enum class InternalAllocationType + { + eExecutable = VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE + }; + + enum class SystemAllocationScope + { + eCommand = VK_SYSTEM_ALLOCATION_SCOPE_COMMAND, + eObject = VK_SYSTEM_ALLOCATION_SCOPE_OBJECT, + eCache = VK_SYSTEM_ALLOCATION_SCOPE_CACHE, + eDevice = VK_SYSTEM_ALLOCATION_SCOPE_DEVICE, + eInstance = VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE + }; + + enum class PhysicalDeviceType + { + eOther = VK_PHYSICAL_DEVICE_TYPE_OTHER, + eIntegratedGpu = VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU, + eDiscreteGpu = VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU, + eVirtualGpu = VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU, + eCpu = VK_PHYSICAL_DEVICE_TYPE_CPU + }; + + enum class VertexInputRate + { + eVertex = VK_VERTEX_INPUT_RATE_VERTEX, + eInstance = VK_VERTEX_INPUT_RATE_INSTANCE + }; + + struct VertexInputBindingDescription + { + VertexInputBindingDescription( uint32_t binding_ = 0, + uint32_t stride_ = 0, + VertexInputRate inputRate_ = VertexInputRate::eVertex ) + : binding( binding_ ) + , stride( stride_ ) + , inputRate( inputRate_ ) + { + } + + VertexInputBindingDescription( VkVertexInputBindingDescription const & rhs ) + { + memcpy( this, &rhs, sizeof( VertexInputBindingDescription ) ); + } + + VertexInputBindingDescription& operator=( VkVertexInputBindingDescription const & rhs ) + { + memcpy( this, &rhs, sizeof( VertexInputBindingDescription ) ); + return *this; + } + VertexInputBindingDescription& setBinding( uint32_t binding_ ) + { + binding = binding_; + return *this; + } + + VertexInputBindingDescription& setStride( uint32_t stride_ ) + { + stride = stride_; + return *this; + } + + VertexInputBindingDescription& setInputRate( VertexInputRate inputRate_ ) + { + inputRate = inputRate_; + return *this; + } + + operator VkVertexInputBindingDescription const&() const + { + return *reinterpret_cast(this); + } + + operator VkVertexInputBindingDescription &() + { + return *reinterpret_cast(this); + } + + bool operator==( VertexInputBindingDescription const& rhs ) const + { + return ( binding == rhs.binding ) + && ( stride == rhs.stride ) + && ( inputRate == rhs.inputRate ); + } + + bool operator!=( VertexInputBindingDescription const& rhs ) const + { + return !operator==( rhs ); + } + + uint32_t binding; + uint32_t stride; + VertexInputRate inputRate; + }; + static_assert( sizeof( VertexInputBindingDescription ) == sizeof( VkVertexInputBindingDescription ), "struct and wrapper have different size!" 
); + + enum class Format + { + eUndefined = VK_FORMAT_UNDEFINED, + eR4G4UnormPack8 = VK_FORMAT_R4G4_UNORM_PACK8, + eR4G4B4A4UnormPack16 = VK_FORMAT_R4G4B4A4_UNORM_PACK16, + eB4G4R4A4UnormPack16 = VK_FORMAT_B4G4R4A4_UNORM_PACK16, + eR5G6B5UnormPack16 = VK_FORMAT_R5G6B5_UNORM_PACK16, + eB5G6R5UnormPack16 = VK_FORMAT_B5G6R5_UNORM_PACK16, + eR5G5B5A1UnormPack16 = VK_FORMAT_R5G5B5A1_UNORM_PACK16, + eB5G5R5A1UnormPack16 = VK_FORMAT_B5G5R5A1_UNORM_PACK16, + eA1R5G5B5UnormPack16 = VK_FORMAT_A1R5G5B5_UNORM_PACK16, + eR8Unorm = VK_FORMAT_R8_UNORM, + eR8Snorm = VK_FORMAT_R8_SNORM, + eR8Uscaled = VK_FORMAT_R8_USCALED, + eR8Sscaled = VK_FORMAT_R8_SSCALED, + eR8Uint = VK_FORMAT_R8_UINT, + eR8Sint = VK_FORMAT_R8_SINT, + eR8Srgb = VK_FORMAT_R8_SRGB, + eR8G8Unorm = VK_FORMAT_R8G8_UNORM, + eR8G8Snorm = VK_FORMAT_R8G8_SNORM, + eR8G8Uscaled = VK_FORMAT_R8G8_USCALED, + eR8G8Sscaled = VK_FORMAT_R8G8_SSCALED, + eR8G8Uint = VK_FORMAT_R8G8_UINT, + eR8G8Sint = VK_FORMAT_R8G8_SINT, + eR8G8Srgb = VK_FORMAT_R8G8_SRGB, + eR8G8B8Unorm = VK_FORMAT_R8G8B8_UNORM, + eR8G8B8Snorm = VK_FORMAT_R8G8B8_SNORM, + eR8G8B8Uscaled = VK_FORMAT_R8G8B8_USCALED, + eR8G8B8Sscaled = VK_FORMAT_R8G8B8_SSCALED, + eR8G8B8Uint = VK_FORMAT_R8G8B8_UINT, + eR8G8B8Sint = VK_FORMAT_R8G8B8_SINT, + eR8G8B8Srgb = VK_FORMAT_R8G8B8_SRGB, + eB8G8R8Unorm = VK_FORMAT_B8G8R8_UNORM, + eB8G8R8Snorm = VK_FORMAT_B8G8R8_SNORM, + eB8G8R8Uscaled = VK_FORMAT_B8G8R8_USCALED, + eB8G8R8Sscaled = VK_FORMAT_B8G8R8_SSCALED, + eB8G8R8Uint = VK_FORMAT_B8G8R8_UINT, + eB8G8R8Sint = VK_FORMAT_B8G8R8_SINT, + eB8G8R8Srgb = VK_FORMAT_B8G8R8_SRGB, + eR8G8B8A8Unorm = VK_FORMAT_R8G8B8A8_UNORM, + eR8G8B8A8Snorm = VK_FORMAT_R8G8B8A8_SNORM, + eR8G8B8A8Uscaled = VK_FORMAT_R8G8B8A8_USCALED, + eR8G8B8A8Sscaled = VK_FORMAT_R8G8B8A8_SSCALED, + eR8G8B8A8Uint = VK_FORMAT_R8G8B8A8_UINT, + eR8G8B8A8Sint = VK_FORMAT_R8G8B8A8_SINT, + eR8G8B8A8Srgb = VK_FORMAT_R8G8B8A8_SRGB, + eB8G8R8A8Unorm = VK_FORMAT_B8G8R8A8_UNORM, + eB8G8R8A8Snorm = VK_FORMAT_B8G8R8A8_SNORM, + eB8G8R8A8Uscaled = VK_FORMAT_B8G8R8A8_USCALED, + eB8G8R8A8Sscaled = VK_FORMAT_B8G8R8A8_SSCALED, + eB8G8R8A8Uint = VK_FORMAT_B8G8R8A8_UINT, + eB8G8R8A8Sint = VK_FORMAT_B8G8R8A8_SINT, + eB8G8R8A8Srgb = VK_FORMAT_B8G8R8A8_SRGB, + eA8B8G8R8UnormPack32 = VK_FORMAT_A8B8G8R8_UNORM_PACK32, + eA8B8G8R8SnormPack32 = VK_FORMAT_A8B8G8R8_SNORM_PACK32, + eA8B8G8R8UscaledPack32 = VK_FORMAT_A8B8G8R8_USCALED_PACK32, + eA8B8G8R8SscaledPack32 = VK_FORMAT_A8B8G8R8_SSCALED_PACK32, + eA8B8G8R8UintPack32 = VK_FORMAT_A8B8G8R8_UINT_PACK32, + eA8B8G8R8SintPack32 = VK_FORMAT_A8B8G8R8_SINT_PACK32, + eA8B8G8R8SrgbPack32 = VK_FORMAT_A8B8G8R8_SRGB_PACK32, + eA2R10G10B10UnormPack32 = VK_FORMAT_A2R10G10B10_UNORM_PACK32, + eA2R10G10B10SnormPack32 = VK_FORMAT_A2R10G10B10_SNORM_PACK32, + eA2R10G10B10UscaledPack32 = VK_FORMAT_A2R10G10B10_USCALED_PACK32, + eA2R10G10B10SscaledPack32 = VK_FORMAT_A2R10G10B10_SSCALED_PACK32, + eA2R10G10B10UintPack32 = VK_FORMAT_A2R10G10B10_UINT_PACK32, + eA2R10G10B10SintPack32 = VK_FORMAT_A2R10G10B10_SINT_PACK32, + eA2B10G10R10UnormPack32 = VK_FORMAT_A2B10G10R10_UNORM_PACK32, + eA2B10G10R10SnormPack32 = VK_FORMAT_A2B10G10R10_SNORM_PACK32, + eA2B10G10R10UscaledPack32 = VK_FORMAT_A2B10G10R10_USCALED_PACK32, + eA2B10G10R10SscaledPack32 = VK_FORMAT_A2B10G10R10_SSCALED_PACK32, + eA2B10G10R10UintPack32 = VK_FORMAT_A2B10G10R10_UINT_PACK32, + eA2B10G10R10SintPack32 = VK_FORMAT_A2B10G10R10_SINT_PACK32, + eR16Unorm = VK_FORMAT_R16_UNORM, + eR16Snorm = VK_FORMAT_R16_SNORM, + eR16Uscaled = VK_FORMAT_R16_USCALED, + eR16Sscaled = VK_FORMAT_R16_SSCALED, + 
eR16Uint = VK_FORMAT_R16_UINT, + eR16Sint = VK_FORMAT_R16_SINT, + eR16Sfloat = VK_FORMAT_R16_SFLOAT, + eR16G16Unorm = VK_FORMAT_R16G16_UNORM, + eR16G16Snorm = VK_FORMAT_R16G16_SNORM, + eR16G16Uscaled = VK_FORMAT_R16G16_USCALED, + eR16G16Sscaled = VK_FORMAT_R16G16_SSCALED, + eR16G16Uint = VK_FORMAT_R16G16_UINT, + eR16G16Sint = VK_FORMAT_R16G16_SINT, + eR16G16Sfloat = VK_FORMAT_R16G16_SFLOAT, + eR16G16B16Unorm = VK_FORMAT_R16G16B16_UNORM, + eR16G16B16Snorm = VK_FORMAT_R16G16B16_SNORM, + eR16G16B16Uscaled = VK_FORMAT_R16G16B16_USCALED, + eR16G16B16Sscaled = VK_FORMAT_R16G16B16_SSCALED, + eR16G16B16Uint = VK_FORMAT_R16G16B16_UINT, + eR16G16B16Sint = VK_FORMAT_R16G16B16_SINT, + eR16G16B16Sfloat = VK_FORMAT_R16G16B16_SFLOAT, + eR16G16B16A16Unorm = VK_FORMAT_R16G16B16A16_UNORM, + eR16G16B16A16Snorm = VK_FORMAT_R16G16B16A16_SNORM, + eR16G16B16A16Uscaled = VK_FORMAT_R16G16B16A16_USCALED, + eR16G16B16A16Sscaled = VK_FORMAT_R16G16B16A16_SSCALED, + eR16G16B16A16Uint = VK_FORMAT_R16G16B16A16_UINT, + eR16G16B16A16Sint = VK_FORMAT_R16G16B16A16_SINT, + eR16G16B16A16Sfloat = VK_FORMAT_R16G16B16A16_SFLOAT, + eR32Uint = VK_FORMAT_R32_UINT, + eR32Sint = VK_FORMAT_R32_SINT, + eR32Sfloat = VK_FORMAT_R32_SFLOAT, + eR32G32Uint = VK_FORMAT_R32G32_UINT, + eR32G32Sint = VK_FORMAT_R32G32_SINT, + eR32G32Sfloat = VK_FORMAT_R32G32_SFLOAT, + eR32G32B32Uint = VK_FORMAT_R32G32B32_UINT, + eR32G32B32Sint = VK_FORMAT_R32G32B32_SINT, + eR32G32B32Sfloat = VK_FORMAT_R32G32B32_SFLOAT, + eR32G32B32A32Uint = VK_FORMAT_R32G32B32A32_UINT, + eR32G32B32A32Sint = VK_FORMAT_R32G32B32A32_SINT, + eR32G32B32A32Sfloat = VK_FORMAT_R32G32B32A32_SFLOAT, + eR64Uint = VK_FORMAT_R64_UINT, + eR64Sint = VK_FORMAT_R64_SINT, + eR64Sfloat = VK_FORMAT_R64_SFLOAT, + eR64G64Uint = VK_FORMAT_R64G64_UINT, + eR64G64Sint = VK_FORMAT_R64G64_SINT, + eR64G64Sfloat = VK_FORMAT_R64G64_SFLOAT, + eR64G64B64Uint = VK_FORMAT_R64G64B64_UINT, + eR64G64B64Sint = VK_FORMAT_R64G64B64_SINT, + eR64G64B64Sfloat = VK_FORMAT_R64G64B64_SFLOAT, + eR64G64B64A64Uint = VK_FORMAT_R64G64B64A64_UINT, + eR64G64B64A64Sint = VK_FORMAT_R64G64B64A64_SINT, + eR64G64B64A64Sfloat = VK_FORMAT_R64G64B64A64_SFLOAT, + eB10G11R11UfloatPack32 = VK_FORMAT_B10G11R11_UFLOAT_PACK32, + eE5B9G9R9UfloatPack32 = VK_FORMAT_E5B9G9R9_UFLOAT_PACK32, + eD16Unorm = VK_FORMAT_D16_UNORM, + eX8D24UnormPack32 = VK_FORMAT_X8_D24_UNORM_PACK32, + eD32Sfloat = VK_FORMAT_D32_SFLOAT, + eS8Uint = VK_FORMAT_S8_UINT, + eD16UnormS8Uint = VK_FORMAT_D16_UNORM_S8_UINT, + eD24UnormS8Uint = VK_FORMAT_D24_UNORM_S8_UINT, + eD32SfloatS8Uint = VK_FORMAT_D32_SFLOAT_S8_UINT, + eBc1RgbUnormBlock = VK_FORMAT_BC1_RGB_UNORM_BLOCK, + eBc1RgbSrgbBlock = VK_FORMAT_BC1_RGB_SRGB_BLOCK, + eBc1RgbaUnormBlock = VK_FORMAT_BC1_RGBA_UNORM_BLOCK, + eBc1RgbaSrgbBlock = VK_FORMAT_BC1_RGBA_SRGB_BLOCK, + eBc2UnormBlock = VK_FORMAT_BC2_UNORM_BLOCK, + eBc2SrgbBlock = VK_FORMAT_BC2_SRGB_BLOCK, + eBc3UnormBlock = VK_FORMAT_BC3_UNORM_BLOCK, + eBc3SrgbBlock = VK_FORMAT_BC3_SRGB_BLOCK, + eBc4UnormBlock = VK_FORMAT_BC4_UNORM_BLOCK, + eBc4SnormBlock = VK_FORMAT_BC4_SNORM_BLOCK, + eBc5UnormBlock = VK_FORMAT_BC5_UNORM_BLOCK, + eBc5SnormBlock = VK_FORMAT_BC5_SNORM_BLOCK, + eBc6HUfloatBlock = VK_FORMAT_BC6H_UFLOAT_BLOCK, + eBc6HSfloatBlock = VK_FORMAT_BC6H_SFLOAT_BLOCK, + eBc7UnormBlock = VK_FORMAT_BC7_UNORM_BLOCK, + eBc7SrgbBlock = VK_FORMAT_BC7_SRGB_BLOCK, + eEtc2R8G8B8UnormBlock = VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK, + eEtc2R8G8B8SrgbBlock = VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK, + eEtc2R8G8B8A1UnormBlock = VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK, + eEtc2R8G8B8A1SrgbBlock = 
VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK, + eEtc2R8G8B8A8UnormBlock = VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK, + eEtc2R8G8B8A8SrgbBlock = VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK, + eEacR11UnormBlock = VK_FORMAT_EAC_R11_UNORM_BLOCK, + eEacR11SnormBlock = VK_FORMAT_EAC_R11_SNORM_BLOCK, + eEacR11G11UnormBlock = VK_FORMAT_EAC_R11G11_UNORM_BLOCK, + eEacR11G11SnormBlock = VK_FORMAT_EAC_R11G11_SNORM_BLOCK, + eAstc4x4UnormBlock = VK_FORMAT_ASTC_4x4_UNORM_BLOCK, + eAstc4x4SrgbBlock = VK_FORMAT_ASTC_4x4_SRGB_BLOCK, + eAstc5x4UnormBlock = VK_FORMAT_ASTC_5x4_UNORM_BLOCK, + eAstc5x4SrgbBlock = VK_FORMAT_ASTC_5x4_SRGB_BLOCK, + eAstc5x5UnormBlock = VK_FORMAT_ASTC_5x5_UNORM_BLOCK, + eAstc5x5SrgbBlock = VK_FORMAT_ASTC_5x5_SRGB_BLOCK, + eAstc6x5UnormBlock = VK_FORMAT_ASTC_6x5_UNORM_BLOCK, + eAstc6x5SrgbBlock = VK_FORMAT_ASTC_6x5_SRGB_BLOCK, + eAstc6x6UnormBlock = VK_FORMAT_ASTC_6x6_UNORM_BLOCK, + eAstc6x6SrgbBlock = VK_FORMAT_ASTC_6x6_SRGB_BLOCK, + eAstc8x5UnormBlock = VK_FORMAT_ASTC_8x5_UNORM_BLOCK, + eAstc8x5SrgbBlock = VK_FORMAT_ASTC_8x5_SRGB_BLOCK, + eAstc8x6UnormBlock = VK_FORMAT_ASTC_8x6_UNORM_BLOCK, + eAstc8x6SrgbBlock = VK_FORMAT_ASTC_8x6_SRGB_BLOCK, + eAstc8x8UnormBlock = VK_FORMAT_ASTC_8x8_UNORM_BLOCK, + eAstc8x8SrgbBlock = VK_FORMAT_ASTC_8x8_SRGB_BLOCK, + eAstc10x5UnormBlock = VK_FORMAT_ASTC_10x5_UNORM_BLOCK, + eAstc10x5SrgbBlock = VK_FORMAT_ASTC_10x5_SRGB_BLOCK, + eAstc10x6UnormBlock = VK_FORMAT_ASTC_10x6_UNORM_BLOCK, + eAstc10x6SrgbBlock = VK_FORMAT_ASTC_10x6_SRGB_BLOCK, + eAstc10x8UnormBlock = VK_FORMAT_ASTC_10x8_UNORM_BLOCK, + eAstc10x8SrgbBlock = VK_FORMAT_ASTC_10x8_SRGB_BLOCK, + eAstc10x10UnormBlock = VK_FORMAT_ASTC_10x10_UNORM_BLOCK, + eAstc10x10SrgbBlock = VK_FORMAT_ASTC_10x10_SRGB_BLOCK, + eAstc12x10UnormBlock = VK_FORMAT_ASTC_12x10_UNORM_BLOCK, + eAstc12x10SrgbBlock = VK_FORMAT_ASTC_12x10_SRGB_BLOCK, + eAstc12x12UnormBlock = VK_FORMAT_ASTC_12x12_UNORM_BLOCK, + eAstc12x12SrgbBlock = VK_FORMAT_ASTC_12x12_SRGB_BLOCK, + eG8B8G8R8422Unorm = VK_FORMAT_G8B8G8R8_422_UNORM, + eG8B8G8R8422UnormKHR = VK_FORMAT_G8B8G8R8_422_UNORM, + eB8G8R8G8422Unorm = VK_FORMAT_B8G8R8G8_422_UNORM, + eB8G8R8G8422UnormKHR = VK_FORMAT_B8G8R8G8_422_UNORM, + eG8B8R83Plane420Unorm = VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM, + eG8B8R83Plane420UnormKHR = VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM, + eG8B8R82Plane420Unorm = VK_FORMAT_G8_B8R8_2PLANE_420_UNORM, + eG8B8R82Plane420UnormKHR = VK_FORMAT_G8_B8R8_2PLANE_420_UNORM, + eG8B8R83Plane422Unorm = VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM, + eG8B8R83Plane422UnormKHR = VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM, + eG8B8R82Plane422Unorm = VK_FORMAT_G8_B8R8_2PLANE_422_UNORM, + eG8B8R82Plane422UnormKHR = VK_FORMAT_G8_B8R8_2PLANE_422_UNORM, + eG8B8R83Plane444Unorm = VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM, + eG8B8R83Plane444UnormKHR = VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM, + eR10X6UnormPack16 = VK_FORMAT_R10X6_UNORM_PACK16, + eR10X6UnormPack16KHR = VK_FORMAT_R10X6_UNORM_PACK16, + eR10X6G10X6Unorm2Pack16 = VK_FORMAT_R10X6G10X6_UNORM_2PACK16, + eR10X6G10X6Unorm2Pack16KHR = VK_FORMAT_R10X6G10X6_UNORM_2PACK16, + eR10X6G10X6B10X6A10X6Unorm4Pack16 = VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16, + eR10X6G10X6B10X6A10X6Unorm4Pack16KHR = VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16, + eG10X6B10X6G10X6R10X6422Unorm4Pack16 = VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16, + eG10X6B10X6G10X6R10X6422Unorm4Pack16KHR = VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16, + eB10X6G10X6R10X6G10X6422Unorm4Pack16 = VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16, + eB10X6G10X6R10X6G10X6422Unorm4Pack16KHR = 
VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16, + eG10X6B10X6R10X63Plane420Unorm3Pack16 = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16, + eG10X6B10X6R10X63Plane420Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16, + eG10X6B10X6R10X62Plane420Unorm3Pack16 = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16, + eG10X6B10X6R10X62Plane420Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16, + eG10X6B10X6R10X63Plane422Unorm3Pack16 = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16, + eG10X6B10X6R10X63Plane422Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16, + eG10X6B10X6R10X62Plane422Unorm3Pack16 = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16, + eG10X6B10X6R10X62Plane422Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16, + eG10X6B10X6R10X63Plane444Unorm3Pack16 = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16, + eG10X6B10X6R10X63Plane444Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16, + eR12X4UnormPack16 = VK_FORMAT_R12X4_UNORM_PACK16, + eR12X4UnormPack16KHR = VK_FORMAT_R12X4_UNORM_PACK16, + eR12X4G12X4Unorm2Pack16 = VK_FORMAT_R12X4G12X4_UNORM_2PACK16, + eR12X4G12X4Unorm2Pack16KHR = VK_FORMAT_R12X4G12X4_UNORM_2PACK16, + eR12X4G12X4B12X4A12X4Unorm4Pack16 = VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16, + eR12X4G12X4B12X4A12X4Unorm4Pack16KHR = VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16, + eG12X4B12X4G12X4R12X4422Unorm4Pack16 = VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16, + eG12X4B12X4G12X4R12X4422Unorm4Pack16KHR = VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16, + eB12X4G12X4R12X4G12X4422Unorm4Pack16 = VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16, + eB12X4G12X4R12X4G12X4422Unorm4Pack16KHR = VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16, + eG12X4B12X4R12X43Plane420Unorm3Pack16 = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16, + eG12X4B12X4R12X43Plane420Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16, + eG12X4B12X4R12X42Plane420Unorm3Pack16 = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16, + eG12X4B12X4R12X42Plane420Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16, + eG12X4B12X4R12X43Plane422Unorm3Pack16 = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16, + eG12X4B12X4R12X43Plane422Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16, + eG12X4B12X4R12X42Plane422Unorm3Pack16 = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16, + eG12X4B12X4R12X42Plane422Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16, + eG12X4B12X4R12X43Plane444Unorm3Pack16 = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16, + eG12X4B12X4R12X43Plane444Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16, + eG16B16G16R16422Unorm = VK_FORMAT_G16B16G16R16_422_UNORM, + eG16B16G16R16422UnormKHR = VK_FORMAT_G16B16G16R16_422_UNORM, + eB16G16R16G16422Unorm = VK_FORMAT_B16G16R16G16_422_UNORM, + eB16G16R16G16422UnormKHR = VK_FORMAT_B16G16R16G16_422_UNORM, + eG16B16R163Plane420Unorm = VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM, + eG16B16R163Plane420UnormKHR = VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM, + eG16B16R162Plane420Unorm = VK_FORMAT_G16_B16R16_2PLANE_420_UNORM, + eG16B16R162Plane420UnormKHR = VK_FORMAT_G16_B16R16_2PLANE_420_UNORM, + eG16B16R163Plane422Unorm = VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM, + eG16B16R163Plane422UnormKHR = VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM, + eG16B16R162Plane422Unorm = VK_FORMAT_G16_B16R16_2PLANE_422_UNORM, + eG16B16R162Plane422UnormKHR = 
VK_FORMAT_G16_B16R16_2PLANE_422_UNORM, + eG16B16R163Plane444Unorm = VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM, + eG16B16R163Plane444UnormKHR = VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM, + ePvrtc12BppUnormBlockIMG = VK_FORMAT_PVRTC1_2BPP_UNORM_BLOCK_IMG, + ePvrtc14BppUnormBlockIMG = VK_FORMAT_PVRTC1_4BPP_UNORM_BLOCK_IMG, + ePvrtc22BppUnormBlockIMG = VK_FORMAT_PVRTC2_2BPP_UNORM_BLOCK_IMG, + ePvrtc24BppUnormBlockIMG = VK_FORMAT_PVRTC2_4BPP_UNORM_BLOCK_IMG, + ePvrtc12BppSrgbBlockIMG = VK_FORMAT_PVRTC1_2BPP_SRGB_BLOCK_IMG, + ePvrtc14BppSrgbBlockIMG = VK_FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG, + ePvrtc22BppSrgbBlockIMG = VK_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG, + ePvrtc24BppSrgbBlockIMG = VK_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG + }; + + struct VertexInputAttributeDescription + { + VertexInputAttributeDescription( uint32_t location_ = 0, + uint32_t binding_ = 0, + Format format_ = Format::eUndefined, + uint32_t offset_ = 0 ) + : location( location_ ) + , binding( binding_ ) + , format( format_ ) + , offset( offset_ ) + { + } + + VertexInputAttributeDescription( VkVertexInputAttributeDescription const & rhs ) + { + memcpy( this, &rhs, sizeof( VertexInputAttributeDescription ) ); + } + + VertexInputAttributeDescription& operator=( VkVertexInputAttributeDescription const & rhs ) + { + memcpy( this, &rhs, sizeof( VertexInputAttributeDescription ) ); + return *this; + } + VertexInputAttributeDescription& setLocation( uint32_t location_ ) + { + location = location_; + return *this; + } + + VertexInputAttributeDescription& setBinding( uint32_t binding_ ) + { + binding = binding_; + return *this; + } + + VertexInputAttributeDescription& setFormat( Format format_ ) + { + format = format_; + return *this; + } + + VertexInputAttributeDescription& setOffset( uint32_t offset_ ) + { + offset = offset_; + return *this; + } + + operator VkVertexInputAttributeDescription const&() const + { + return *reinterpret_cast(this); + } + + operator VkVertexInputAttributeDescription &() + { + return *reinterpret_cast(this); + } + + bool operator==( VertexInputAttributeDescription const& rhs ) const + { + return ( location == rhs.location ) + && ( binding == rhs.binding ) + && ( format == rhs.format ) + && ( offset == rhs.offset ); + } + + bool operator!=( VertexInputAttributeDescription const& rhs ) const + { + return !operator==( rhs ); + } + + uint32_t location; + uint32_t binding; + Format format; + uint32_t offset; + }; + static_assert( sizeof( VertexInputAttributeDescription ) == sizeof( VkVertexInputAttributeDescription ), "struct and wrapper have different size!" 
); + + enum class StructureType + { + eApplicationInfo = VK_STRUCTURE_TYPE_APPLICATION_INFO, + eInstanceCreateInfo = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO, + eDeviceQueueCreateInfo = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO, + eDeviceCreateInfo = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO, + eSubmitInfo = VK_STRUCTURE_TYPE_SUBMIT_INFO, + eMemoryAllocateInfo = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO, + eMappedMemoryRange = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE, + eBindSparseInfo = VK_STRUCTURE_TYPE_BIND_SPARSE_INFO, + eFenceCreateInfo = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO, + eSemaphoreCreateInfo = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO, + eEventCreateInfo = VK_STRUCTURE_TYPE_EVENT_CREATE_INFO, + eQueryPoolCreateInfo = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO, + eBufferCreateInfo = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO, + eBufferViewCreateInfo = VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO, + eImageCreateInfo = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO, + eImageViewCreateInfo = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO, + eShaderModuleCreateInfo = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO, + ePipelineCacheCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO, + ePipelineShaderStageCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO, + ePipelineVertexInputStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO, + ePipelineInputAssemblyStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO, + ePipelineTessellationStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO, + ePipelineViewportStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO, + ePipelineRasterizationStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO, + ePipelineMultisampleStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO, + ePipelineDepthStencilStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO, + ePipelineColorBlendStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO, + ePipelineDynamicStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO, + eGraphicsPipelineCreateInfo = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO, + eComputePipelineCreateInfo = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO, + ePipelineLayoutCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO, + eSamplerCreateInfo = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO, + eDescriptorSetLayoutCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO, + eDescriptorPoolCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO, + eDescriptorSetAllocateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO, + eWriteDescriptorSet = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET, + eCopyDescriptorSet = VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET, + eFramebufferCreateInfo = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, + eRenderPassCreateInfo = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, + eCommandPoolCreateInfo = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO, + eCommandBufferAllocateInfo = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO, + eCommandBufferInheritanceInfo = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO, + eCommandBufferBeginInfo = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO, + eRenderPassBeginInfo = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO, + eBufferMemoryBarrier = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER, + eImageMemoryBarrier = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER, + eMemoryBarrier = VK_STRUCTURE_TYPE_MEMORY_BARRIER, + eLoaderInstanceCreateInfo = 
VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO, + eLoaderDeviceCreateInfo = VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO, + ePhysicalDeviceSubgroupProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES, + eBindBufferMemoryInfo = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO, + eBindBufferMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO, + eBindImageMemoryInfo = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO, + eBindImageMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO, + ePhysicalDevice16BitStorageFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES, + ePhysicalDevice16BitStorageFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES, + eMemoryDedicatedRequirements = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS, + eMemoryDedicatedRequirementsKHR = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS, + eMemoryDedicatedAllocateInfo = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO, + eMemoryDedicatedAllocateInfoKHR = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO, + eMemoryAllocateFlagsInfo = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO, + eMemoryAllocateFlagsInfoKHR = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO, + eDeviceGroupRenderPassBeginInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO, + eDeviceGroupRenderPassBeginInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO, + eDeviceGroupCommandBufferBeginInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO, + eDeviceGroupCommandBufferBeginInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO, + eDeviceGroupSubmitInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO, + eDeviceGroupSubmitInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO, + eDeviceGroupBindSparseInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO, + eDeviceGroupBindSparseInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO, + eBindBufferMemoryDeviceGroupInfo = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO, + eBindBufferMemoryDeviceGroupInfoKHR = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO, + eBindImageMemoryDeviceGroupInfo = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO, + eBindImageMemoryDeviceGroupInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO, + ePhysicalDeviceGroupProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES, + ePhysicalDeviceGroupPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES, + eDeviceGroupDeviceCreateInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO, + eDeviceGroupDeviceCreateInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO, + eBufferMemoryRequirementsInfo2 = VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2, + eBufferMemoryRequirementsInfo2KHR = VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2, + eImageMemoryRequirementsInfo2 = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2, + eImageMemoryRequirementsInfo2KHR = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2, + eImageSparseMemoryRequirementsInfo2 = VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2, + eImageSparseMemoryRequirementsInfo2KHR = VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2, + eMemoryRequirements2 = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2, + eMemoryRequirements2KHR = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2, + eSparseImageMemoryRequirements2 = VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2, + eSparseImageMemoryRequirements2KHR = VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2, + ePhysicalDeviceFeatures2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2, + 
ePhysicalDeviceFeatures2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2, + ePhysicalDeviceProperties2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2, + ePhysicalDeviceProperties2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2, + eFormatProperties2 = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2, + eFormatProperties2KHR = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2, + eImageFormatProperties2 = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2, + eImageFormatProperties2KHR = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2, + ePhysicalDeviceImageFormatInfo2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2, + ePhysicalDeviceImageFormatInfo2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2, + eQueueFamilyProperties2 = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2, + eQueueFamilyProperties2KHR = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2, + ePhysicalDeviceMemoryProperties2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2, + ePhysicalDeviceMemoryProperties2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2, + eSparseImageFormatProperties2 = VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2, + eSparseImageFormatProperties2KHR = VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2, + ePhysicalDeviceSparseImageFormatInfo2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2, + ePhysicalDeviceSparseImageFormatInfo2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2, + ePhysicalDevicePointClippingProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES, + ePhysicalDevicePointClippingPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES, + eRenderPassInputAttachmentAspectCreateInfo = VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO, + eRenderPassInputAttachmentAspectCreateInfoKHR = VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO, + eImageViewUsageCreateInfo = VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO, + eImageViewUsageCreateInfoKHR = VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO, + ePipelineTessellationDomainOriginStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO, + ePipelineTessellationDomainOriginStateCreateInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO, + eRenderPassMultiviewCreateInfo = VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO, + eRenderPassMultiviewCreateInfoKHR = VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO, + ePhysicalDeviceMultiviewFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES, + ePhysicalDeviceMultiviewFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES, + ePhysicalDeviceMultiviewProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES, + ePhysicalDeviceMultiviewPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES, + ePhysicalDeviceVariablePointerFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES, + ePhysicalDeviceVariablePointerFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES, + eProtectedSubmitInfo = VK_STRUCTURE_TYPE_PROTECTED_SUBMIT_INFO, + ePhysicalDeviceProtectedMemoryFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES, + ePhysicalDeviceProtectedMemoryProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_PROPERTIES, + eDeviceQueueInfo2 = VK_STRUCTURE_TYPE_DEVICE_QUEUE_INFO_2, + eSamplerYcbcrConversionCreateInfo = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO, + eSamplerYcbcrConversionCreateInfoKHR = 
VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO, + eSamplerYcbcrConversionInfo = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO, + eSamplerYcbcrConversionInfoKHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO, + eBindImagePlaneMemoryInfo = VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO, + eBindImagePlaneMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO, + eImagePlaneMemoryRequirementsInfo = VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO, + eImagePlaneMemoryRequirementsInfoKHR = VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO, + ePhysicalDeviceSamplerYcbcrConversionFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES, + ePhysicalDeviceSamplerYcbcrConversionFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES, + eSamplerYcbcrConversionImageFormatProperties = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES, + eSamplerYcbcrConversionImageFormatPropertiesKHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES, + eDescriptorUpdateTemplateCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO, + eDescriptorUpdateTemplateCreateInfoKHR = VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO, + ePhysicalDeviceExternalImageFormatInfo = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO, + ePhysicalDeviceExternalImageFormatInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO, + eExternalImageFormatProperties = VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES, + eExternalImageFormatPropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES, + ePhysicalDeviceExternalBufferInfo = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO, + ePhysicalDeviceExternalBufferInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO, + eExternalBufferProperties = VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES, + eExternalBufferPropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES, + ePhysicalDeviceIdProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES, + ePhysicalDeviceIdPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES, + eExternalMemoryBufferCreateInfo = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO, + eExternalMemoryBufferCreateInfoKHR = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO, + eExternalMemoryImageCreateInfo = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO, + eExternalMemoryImageCreateInfoKHR = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO, + eExportMemoryAllocateInfo = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO, + eExportMemoryAllocateInfoKHR = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO, + ePhysicalDeviceExternalFenceInfo = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO, + ePhysicalDeviceExternalFenceInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO, + eExternalFenceProperties = VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES, + eExternalFencePropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES, + eExportFenceCreateInfo = VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO, + eExportFenceCreateInfoKHR = VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO, + eExportSemaphoreCreateInfo = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO, + eExportSemaphoreCreateInfoKHR = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO, + ePhysicalDeviceExternalSemaphoreInfo = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO, + ePhysicalDeviceExternalSemaphoreInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO, + eExternalSemaphoreProperties = 
VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES, + eExternalSemaphorePropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES, + ePhysicalDeviceMaintenance3Properties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES, + ePhysicalDeviceMaintenance3PropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES, + eDescriptorSetLayoutSupport = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT, + eDescriptorSetLayoutSupportKHR = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT, + ePhysicalDeviceShaderDrawParameterFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETER_FEATURES, + eSwapchainCreateInfoKHR = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR, + ePresentInfoKHR = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR, + eDeviceGroupPresentCapabilitiesKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_CAPABILITIES_KHR, + eImageSwapchainCreateInfoKHR = VK_STRUCTURE_TYPE_IMAGE_SWAPCHAIN_CREATE_INFO_KHR, + eBindImageMemorySwapchainInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR, + eAcquireNextImageInfoKHR = VK_STRUCTURE_TYPE_ACQUIRE_NEXT_IMAGE_INFO_KHR, + eDeviceGroupPresentInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_INFO_KHR, + eDeviceGroupSwapchainCreateInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_SWAPCHAIN_CREATE_INFO_KHR, + eDisplayModeCreateInfoKHR = VK_STRUCTURE_TYPE_DISPLAY_MODE_CREATE_INFO_KHR, + eDisplaySurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_DISPLAY_SURFACE_CREATE_INFO_KHR, + eDisplayPresentInfoKHR = VK_STRUCTURE_TYPE_DISPLAY_PRESENT_INFO_KHR, + eXlibSurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR, + eXcbSurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR, + eWaylandSurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR, + eAndroidSurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR, + eWin32SurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR, + eDebugReportCallbackCreateInfoEXT = VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT, + eDebugReportCreateInfoEXT = VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT, + ePipelineRasterizationStateRasterizationOrderAMD = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_RASTERIZATION_ORDER_AMD, + eDebugMarkerObjectNameInfoEXT = VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_NAME_INFO_EXT, + eDebugMarkerObjectTagInfoEXT = VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_TAG_INFO_EXT, + eDebugMarkerMarkerInfoEXT = VK_STRUCTURE_TYPE_DEBUG_MARKER_MARKER_INFO_EXT, + eDedicatedAllocationImageCreateInfoNV = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_IMAGE_CREATE_INFO_NV, + eDedicatedAllocationBufferCreateInfoNV = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV, + eDedicatedAllocationMemoryAllocateInfoNV = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV, + ePhysicalDeviceTransformFeedbackFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT, + ePhysicalDeviceTransformFeedbackPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_PROPERTIES_EXT, + ePipelineRasterizationStateStreamCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_STREAM_CREATE_INFO_EXT, + eTextureLodGatherFormatPropertiesAMD = VK_STRUCTURE_TYPE_TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD, + ePhysicalDeviceCornerSampledImageFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CORNER_SAMPLED_IMAGE_FEATURES_NV, + eExternalMemoryImageCreateInfoNV = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_NV, + eExportMemoryAllocateInfoNV = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_NV, + 
eImportMemoryWin32HandleInfoNV = VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_NV, + eExportMemoryWin32HandleInfoNV = VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_NV, + eWin32KeyedMutexAcquireReleaseInfoNV = VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_NV, + eValidationFlagsEXT = VK_STRUCTURE_TYPE_VALIDATION_FLAGS_EXT, + eViSurfaceCreateInfoNN = VK_STRUCTURE_TYPE_VI_SURFACE_CREATE_INFO_NN, + eImageViewAstcDecodeModeEXT = VK_STRUCTURE_TYPE_IMAGE_VIEW_ASTC_DECODE_MODE_EXT, + ePhysicalDeviceAstcDecodeFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ASTC_DECODE_FEATURES_EXT, + eImportMemoryWin32HandleInfoKHR = VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR, + eExportMemoryWin32HandleInfoKHR = VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_KHR, + eMemoryWin32HandlePropertiesKHR = VK_STRUCTURE_TYPE_MEMORY_WIN32_HANDLE_PROPERTIES_KHR, + eMemoryGetWin32HandleInfoKHR = VK_STRUCTURE_TYPE_MEMORY_GET_WIN32_HANDLE_INFO_KHR, + eImportMemoryFdInfoKHR = VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR, + eMemoryFdPropertiesKHR = VK_STRUCTURE_TYPE_MEMORY_FD_PROPERTIES_KHR, + eMemoryGetFdInfoKHR = VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR, + eWin32KeyedMutexAcquireReleaseInfoKHR = VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_KHR, + eImportSemaphoreWin32HandleInfoKHR = VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR, + eExportSemaphoreWin32HandleInfoKHR = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR, + eD3D12FenceSubmitInfoKHR = VK_STRUCTURE_TYPE_D3D12_FENCE_SUBMIT_INFO_KHR, + eSemaphoreGetWin32HandleInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR, + eImportSemaphoreFdInfoKHR = VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR, + eSemaphoreGetFdInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR, + ePhysicalDevicePushDescriptorPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR, + eCommandBufferInheritanceConditionalRenderingInfoEXT = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_CONDITIONAL_RENDERING_INFO_EXT, + ePhysicalDeviceConditionalRenderingFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT, + eConditionalRenderingBeginInfoEXT = VK_STRUCTURE_TYPE_CONDITIONAL_RENDERING_BEGIN_INFO_EXT, + ePresentRegionsKHR = VK_STRUCTURE_TYPE_PRESENT_REGIONS_KHR, + eObjectTableCreateInfoNVX = VK_STRUCTURE_TYPE_OBJECT_TABLE_CREATE_INFO_NVX, + eIndirectCommandsLayoutCreateInfoNVX = VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NVX, + eCmdProcessCommandsInfoNVX = VK_STRUCTURE_TYPE_CMD_PROCESS_COMMANDS_INFO_NVX, + eCmdReserveSpaceForCommandsInfoNVX = VK_STRUCTURE_TYPE_CMD_RESERVE_SPACE_FOR_COMMANDS_INFO_NVX, + eDeviceGeneratedCommandsLimitsNVX = VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_LIMITS_NVX, + eDeviceGeneratedCommandsFeaturesNVX = VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_FEATURES_NVX, + ePipelineViewportWScalingStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_W_SCALING_STATE_CREATE_INFO_NV, + eSurfaceCapabilities2EXT = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_EXT, + eDisplayPowerInfoEXT = VK_STRUCTURE_TYPE_DISPLAY_POWER_INFO_EXT, + eDeviceEventInfoEXT = VK_STRUCTURE_TYPE_DEVICE_EVENT_INFO_EXT, + eDisplayEventInfoEXT = VK_STRUCTURE_TYPE_DISPLAY_EVENT_INFO_EXT, + eSwapchainCounterCreateInfoEXT = VK_STRUCTURE_TYPE_SWAPCHAIN_COUNTER_CREATE_INFO_EXT, + ePresentTimesInfoGOOGLE = VK_STRUCTURE_TYPE_PRESENT_TIMES_INFO_GOOGLE, + ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX, + 
ePipelineViewportSwizzleStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV, + ePhysicalDeviceDiscardRectanglePropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISCARD_RECTANGLE_PROPERTIES_EXT, + ePipelineDiscardRectangleStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_DISCARD_RECTANGLE_STATE_CREATE_INFO_EXT, + ePhysicalDeviceConservativeRasterizationPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONSERVATIVE_RASTERIZATION_PROPERTIES_EXT, + ePipelineRasterizationConservativeStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_CONSERVATIVE_STATE_CREATE_INFO_EXT, + eHdrMetadataEXT = VK_STRUCTURE_TYPE_HDR_METADATA_EXT, + eAttachmentDescription2KHR = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2_KHR, + eAttachmentReference2KHR = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2_KHR, + eSubpassDescription2KHR = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2_KHR, + eSubpassDependency2KHR = VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2_KHR, + eRenderPassCreateInfo2KHR = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2_KHR, + eSubpassBeginInfoKHR = VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO_KHR, + eSubpassEndInfoKHR = VK_STRUCTURE_TYPE_SUBPASS_END_INFO_KHR, + eSharedPresentSurfaceCapabilitiesKHR = VK_STRUCTURE_TYPE_SHARED_PRESENT_SURFACE_CAPABILITIES_KHR, + eImportFenceWin32HandleInfoKHR = VK_STRUCTURE_TYPE_IMPORT_FENCE_WIN32_HANDLE_INFO_KHR, + eExportFenceWin32HandleInfoKHR = VK_STRUCTURE_TYPE_EXPORT_FENCE_WIN32_HANDLE_INFO_KHR, + eFenceGetWin32HandleInfoKHR = VK_STRUCTURE_TYPE_FENCE_GET_WIN32_HANDLE_INFO_KHR, + eImportFenceFdInfoKHR = VK_STRUCTURE_TYPE_IMPORT_FENCE_FD_INFO_KHR, + eFenceGetFdInfoKHR = VK_STRUCTURE_TYPE_FENCE_GET_FD_INFO_KHR, + ePhysicalDeviceSurfaceInfo2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR, + eSurfaceCapabilities2KHR = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_KHR, + eSurfaceFormat2KHR = VK_STRUCTURE_TYPE_SURFACE_FORMAT_2_KHR, + eDisplayProperties2KHR = VK_STRUCTURE_TYPE_DISPLAY_PROPERTIES_2_KHR, + eDisplayPlaneProperties2KHR = VK_STRUCTURE_TYPE_DISPLAY_PLANE_PROPERTIES_2_KHR, + eDisplayModeProperties2KHR = VK_STRUCTURE_TYPE_DISPLAY_MODE_PROPERTIES_2_KHR, + eDisplayPlaneInfo2KHR = VK_STRUCTURE_TYPE_DISPLAY_PLANE_INFO_2_KHR, + eDisplayPlaneCapabilities2KHR = VK_STRUCTURE_TYPE_DISPLAY_PLANE_CAPABILITIES_2_KHR, + eIosSurfaceCreateInfoMVK = VK_STRUCTURE_TYPE_IOS_SURFACE_CREATE_INFO_MVK, + eMacosSurfaceCreateInfoMVK = VK_STRUCTURE_TYPE_MACOS_SURFACE_CREATE_INFO_MVK, + eDebugUtilsObjectNameInfoEXT = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT, + eDebugUtilsObjectTagInfoEXT = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_TAG_INFO_EXT, + eDebugUtilsLabelEXT = VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT, + eDebugUtilsMessengerCallbackDataEXT = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CALLBACK_DATA_EXT, + eDebugUtilsMessengerCreateInfoEXT = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT, + eAndroidHardwareBufferUsageANDROID = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_USAGE_ANDROID, + eAndroidHardwareBufferPropertiesANDROID = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID, + eAndroidHardwareBufferFormatPropertiesANDROID = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID, + eImportAndroidHardwareBufferInfoANDROID = VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID, + eMemoryGetAndroidHardwareBufferInfoANDROID = VK_STRUCTURE_TYPE_MEMORY_GET_ANDROID_HARDWARE_BUFFER_INFO_ANDROID, + eExternalFormatANDROID = VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID, + ePhysicalDeviceSamplerFilterMinmaxPropertiesEXT = 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES_EXT, + eSamplerReductionModeCreateInfoEXT = VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO_EXT, + ePhysicalDeviceInlineUniformBlockFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES_EXT, + ePhysicalDeviceInlineUniformBlockPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES_EXT, + eWriteDescriptorSetInlineUniformBlockEXT = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK_EXT, + eDescriptorPoolInlineUniformBlockCreateInfoEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO_EXT, + eSampleLocationsInfoEXT = VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT, + eRenderPassSampleLocationsBeginInfoEXT = VK_STRUCTURE_TYPE_RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT, + ePipelineSampleLocationsStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_SAMPLE_LOCATIONS_STATE_CREATE_INFO_EXT, + ePhysicalDeviceSampleLocationsPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLE_LOCATIONS_PROPERTIES_EXT, + eMultisamplePropertiesEXT = VK_STRUCTURE_TYPE_MULTISAMPLE_PROPERTIES_EXT, + eImageFormatListCreateInfoKHR = VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR, + ePhysicalDeviceBlendOperationAdvancedFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT, + ePhysicalDeviceBlendOperationAdvancedPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT, + ePipelineColorBlendAdvancedStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_ADVANCED_STATE_CREATE_INFO_EXT, + ePipelineCoverageToColorStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_TO_COLOR_STATE_CREATE_INFO_NV, + ePipelineCoverageModulationStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV, + eDrmFormatModifierPropertiesListEXT = VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT, + eDrmFormatModifierPropertiesEXT = VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT, + ePhysicalDeviceImageDrmFormatModifierInfoEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT, + eImageDrmFormatModifierListCreateInfoEXT = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_LIST_CREATE_INFO_EXT, + eImageDrmFormatModifierExplicitCreateInfoEXT = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_EXPLICIT_CREATE_INFO_EXT, + eImageDrmFormatModifierPropertiesEXT = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT, + eValidationCacheCreateInfoEXT = VK_STRUCTURE_TYPE_VALIDATION_CACHE_CREATE_INFO_EXT, + eShaderModuleValidationCacheCreateInfoEXT = VK_STRUCTURE_TYPE_SHADER_MODULE_VALIDATION_CACHE_CREATE_INFO_EXT, + eDescriptorSetLayoutBindingFlagsCreateInfoEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT, + ePhysicalDeviceDescriptorIndexingFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES_EXT, + ePhysicalDeviceDescriptorIndexingPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES_EXT, + eDescriptorSetVariableDescriptorCountAllocateInfoEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT, + eDescriptorSetVariableDescriptorCountLayoutSupportEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT_EXT, + ePipelineViewportShadingRateImageStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SHADING_RATE_IMAGE_STATE_CREATE_INFO_NV, + ePhysicalDeviceShadingRateImageFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_FEATURES_NV, + 
ePhysicalDeviceShadingRateImagePropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_PROPERTIES_NV, + ePipelineViewportCoarseSampleOrderStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_COARSE_SAMPLE_ORDER_STATE_CREATE_INFO_NV, + eRayTracingPipelineCreateInfoNV = VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_NV, + eAccelerationStructureCreateInfoNV = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_NV, + eGeometryNV = VK_STRUCTURE_TYPE_GEOMETRY_NV, + eGeometryTrianglesNV = VK_STRUCTURE_TYPE_GEOMETRY_TRIANGLES_NV, + eGeometryAabbNV = VK_STRUCTURE_TYPE_GEOMETRY_AABB_NV, + eBindAccelerationStructureMemoryInfoNV = VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_NV, + eWriteDescriptorSetAccelerationStructureNV = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV, + eAccelerationStructureMemoryRequirementsInfoNV = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV, + ePhysicalDeviceRayTracingPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PROPERTIES_NV, + eRayTracingShaderGroupCreateInfoNV = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV, + eAccelerationStructureInfoNV = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_INFO_NV, + ePhysicalDeviceRepresentativeFragmentTestFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_REPRESENTATIVE_FRAGMENT_TEST_FEATURES_NV, + ePipelineRepresentativeFragmentTestStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_REPRESENTATIVE_FRAGMENT_TEST_STATE_CREATE_INFO_NV, + eDeviceQueueGlobalPriorityCreateInfoEXT = VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_EXT, + ePhysicalDevice8BitStorageFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES_KHR, + eImportMemoryHostPointerInfoEXT = VK_STRUCTURE_TYPE_IMPORT_MEMORY_HOST_POINTER_INFO_EXT, + eMemoryHostPointerPropertiesEXT = VK_STRUCTURE_TYPE_MEMORY_HOST_POINTER_PROPERTIES_EXT, + ePhysicalDeviceExternalMemoryHostPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT, + ePhysicalDeviceShaderAtomicInt64FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES_KHR, + eCalibratedTimestampInfoEXT = VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_EXT, + ePhysicalDeviceShaderCorePropertiesAMD = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_AMD, + eDeviceMemoryOverallocationCreateInfoAMD = VK_STRUCTURE_TYPE_DEVICE_MEMORY_OVERALLOCATION_CREATE_INFO_AMD, + ePhysicalDeviceVertexAttributeDivisorPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT, + ePipelineVertexInputDivisorStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT, + ePhysicalDeviceVertexAttributeDivisorFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT, + ePhysicalDeviceDriverPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES_KHR, + ePhysicalDeviceComputeShaderDerivativesFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_NV, + ePhysicalDeviceMeshShaderFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_FEATURES_NV, + ePhysicalDeviceMeshShaderPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_NV, + ePhysicalDeviceFragmentShaderBarycentricFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_NV, + ePhysicalDeviceShaderImageFootprintFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_FOOTPRINT_FEATURES_NV, + ePipelineViewportExclusiveScissorStateCreateInfoNV = 
VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_EXCLUSIVE_SCISSOR_STATE_CREATE_INFO_NV, + ePhysicalDeviceExclusiveScissorFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXCLUSIVE_SCISSOR_FEATURES_NV, + eCheckpointDataNV = VK_STRUCTURE_TYPE_CHECKPOINT_DATA_NV, + eQueueFamilyCheckpointPropertiesNV = VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_NV, + ePhysicalDeviceVulkanMemoryModelFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES_KHR, + ePhysicalDevicePciBusInfoPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PCI_BUS_INFO_PROPERTIES_EXT, + eImagepipeSurfaceCreateInfoFUCHSIA = VK_STRUCTURE_TYPE_IMAGEPIPE_SURFACE_CREATE_INFO_FUCHSIA + }; + + struct ApplicationInfo + { + ApplicationInfo( const char* pApplicationName_ = nullptr, + uint32_t applicationVersion_ = 0, + const char* pEngineName_ = nullptr, + uint32_t engineVersion_ = 0, + uint32_t apiVersion_ = 0 ) + : pApplicationName( pApplicationName_ ) + , applicationVersion( applicationVersion_ ) + , pEngineName( pEngineName_ ) + , engineVersion( engineVersion_ ) + , apiVersion( apiVersion_ ) + { + } + + ApplicationInfo( VkApplicationInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( ApplicationInfo ) ); + } + + ApplicationInfo& operator=( VkApplicationInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( ApplicationInfo ) ); + return *this; + } + ApplicationInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + ApplicationInfo& setPApplicationName( const char* pApplicationName_ ) + { + pApplicationName = pApplicationName_; + return *this; + } + + ApplicationInfo& setApplicationVersion( uint32_t applicationVersion_ ) + { + applicationVersion = applicationVersion_; + return *this; + } + + ApplicationInfo& setPEngineName( const char* pEngineName_ ) + { + pEngineName = pEngineName_; + return *this; + } + + ApplicationInfo& setEngineVersion( uint32_t engineVersion_ ) + { + engineVersion = engineVersion_; + return *this; + } + + ApplicationInfo& setApiVersion( uint32_t apiVersion_ ) + { + apiVersion = apiVersion_; + return *this; + } + + operator VkApplicationInfo const&() const + { + return *reinterpret_cast(this); + } + + operator VkApplicationInfo &() + { + return *reinterpret_cast(this); + } + + bool operator==( ApplicationInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( pApplicationName == rhs.pApplicationName ) + && ( applicationVersion == rhs.applicationVersion ) + && ( pEngineName == rhs.pEngineName ) + && ( engineVersion == rhs.engineVersion ) + && ( apiVersion == rhs.apiVersion ); + } + + bool operator!=( ApplicationInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eApplicationInfo; + + public: + const void* pNext = nullptr; + const char* pApplicationName; + uint32_t applicationVersion; + const char* pEngineName; + uint32_t engineVersion; + uint32_t apiVersion; + }; + static_assert( sizeof( ApplicationInfo ) == sizeof( VkApplicationInfo ), "struct and wrapper have different size!" 
);
+
+  struct InstanceCreateInfo
+  {
+    InstanceCreateInfo( InstanceCreateFlags flags_ = InstanceCreateFlags(),
+                        const ApplicationInfo* pApplicationInfo_ = nullptr,
+                        uint32_t enabledLayerCount_ = 0,
+                        const char* const* ppEnabledLayerNames_ = nullptr,
+                        uint32_t enabledExtensionCount_ = 0,
+                        const char* const* ppEnabledExtensionNames_ = nullptr )
+      : flags( flags_ )
+      , pApplicationInfo( pApplicationInfo_ )
+      , enabledLayerCount( enabledLayerCount_ )
+      , ppEnabledLayerNames( ppEnabledLayerNames_ )
+      , enabledExtensionCount( enabledExtensionCount_ )
+      , ppEnabledExtensionNames( ppEnabledExtensionNames_ )
+    {
+    }
+
+    InstanceCreateInfo( VkInstanceCreateInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( InstanceCreateInfo ) );
+    }
+
+    InstanceCreateInfo& operator=( VkInstanceCreateInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( InstanceCreateInfo ) );
+      return *this;
+    }
+    InstanceCreateInfo& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    InstanceCreateInfo& setFlags( InstanceCreateFlags flags_ )
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    InstanceCreateInfo& setPApplicationInfo( const ApplicationInfo* pApplicationInfo_ )
+    {
+      pApplicationInfo = pApplicationInfo_;
+      return *this;
+    }
+
+    InstanceCreateInfo& setEnabledLayerCount( uint32_t enabledLayerCount_ )
+    {
+      enabledLayerCount = enabledLayerCount_;
+      return *this;
+    }
+
+    InstanceCreateInfo& setPpEnabledLayerNames( const char* const* ppEnabledLayerNames_ )
+    {
+      ppEnabledLayerNames = ppEnabledLayerNames_;
+      return *this;
+    }
+
+    InstanceCreateInfo& setEnabledExtensionCount( uint32_t enabledExtensionCount_ )
+    {
+      enabledExtensionCount = enabledExtensionCount_;
+      return *this;
+    }
+
+    InstanceCreateInfo& setPpEnabledExtensionNames( const char* const* ppEnabledExtensionNames_ )
+    {
+      ppEnabledExtensionNames = ppEnabledExtensionNames_;
+      return *this;
+    }
+
+    operator VkInstanceCreateInfo const&() const
+    {
+      return *reinterpret_cast<const VkInstanceCreateInfo*>(this);
+    }
+
+    operator VkInstanceCreateInfo &()
+    {
+      return *reinterpret_cast<VkInstanceCreateInfo*>(this);
+    }
+
+    bool operator==( InstanceCreateInfo const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( pApplicationInfo == rhs.pApplicationInfo )
+          && ( enabledLayerCount == rhs.enabledLayerCount )
+          && ( ppEnabledLayerNames == rhs.ppEnabledLayerNames )
+          && ( enabledExtensionCount == rhs.enabledExtensionCount )
+          && ( ppEnabledExtensionNames == rhs.ppEnabledExtensionNames );
+    }
+
+    bool operator!=( InstanceCreateInfo const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType = StructureType::eInstanceCreateInfo;
+
+  public:
+    const void* pNext = nullptr;
+    InstanceCreateFlags flags;
+    const ApplicationInfo* pApplicationInfo;
+    uint32_t enabledLayerCount;
+    const char* const* ppEnabledLayerNames;
+    uint32_t enabledExtensionCount;
+    const char* const* ppEnabledExtensionNames;
+  };
+  static_assert( sizeof( InstanceCreateInfo ) == sizeof( VkInstanceCreateInfo ), "struct and wrapper have different size!"
); + + struct MemoryAllocateInfo + { + MemoryAllocateInfo( DeviceSize allocationSize_ = 0, + uint32_t memoryTypeIndex_ = 0 ) + : allocationSize( allocationSize_ ) + , memoryTypeIndex( memoryTypeIndex_ ) + { + } + + MemoryAllocateInfo( VkMemoryAllocateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( MemoryAllocateInfo ) ); + } + + MemoryAllocateInfo& operator=( VkMemoryAllocateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( MemoryAllocateInfo ) ); + return *this; + } + MemoryAllocateInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + MemoryAllocateInfo& setAllocationSize( DeviceSize allocationSize_ ) + { + allocationSize = allocationSize_; + return *this; + } + + MemoryAllocateInfo& setMemoryTypeIndex( uint32_t memoryTypeIndex_ ) + { + memoryTypeIndex = memoryTypeIndex_; + return *this; + } + + operator VkMemoryAllocateInfo const&() const + { + return *reinterpret_cast(this); + } + + operator VkMemoryAllocateInfo &() + { + return *reinterpret_cast(this); + } + + bool operator==( MemoryAllocateInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( allocationSize == rhs.allocationSize ) + && ( memoryTypeIndex == rhs.memoryTypeIndex ); + } + + bool operator!=( MemoryAllocateInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eMemoryAllocateInfo; + + public: + const void* pNext = nullptr; + DeviceSize allocationSize; + uint32_t memoryTypeIndex; + }; + static_assert( sizeof( MemoryAllocateInfo ) == sizeof( VkMemoryAllocateInfo ), "struct and wrapper have different size!" ); + + struct MappedMemoryRange + { + MappedMemoryRange( DeviceMemory memory_ = DeviceMemory(), + DeviceSize offset_ = 0, + DeviceSize size_ = 0 ) + : memory( memory_ ) + , offset( offset_ ) + , size( size_ ) + { + } + + MappedMemoryRange( VkMappedMemoryRange const & rhs ) + { + memcpy( this, &rhs, sizeof( MappedMemoryRange ) ); + } + + MappedMemoryRange& operator=( VkMappedMemoryRange const & rhs ) + { + memcpy( this, &rhs, sizeof( MappedMemoryRange ) ); + return *this; + } + MappedMemoryRange& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + MappedMemoryRange& setMemory( DeviceMemory memory_ ) + { + memory = memory_; + return *this; + } + + MappedMemoryRange& setOffset( DeviceSize offset_ ) + { + offset = offset_; + return *this; + } + + MappedMemoryRange& setSize( DeviceSize size_ ) + { + size = size_; + return *this; + } + + operator VkMappedMemoryRange const&() const + { + return *reinterpret_cast(this); + } + + operator VkMappedMemoryRange &() + { + return *reinterpret_cast(this); + } + + bool operator==( MappedMemoryRange const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( memory == rhs.memory ) + && ( offset == rhs.offset ) + && ( size == rhs.size ); + } + + bool operator!=( MappedMemoryRange const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eMappedMemoryRange; + + public: + const void* pNext = nullptr; + DeviceMemory memory; + DeviceSize offset; + DeviceSize size; + }; + static_assert( sizeof( MappedMemoryRange ) == sizeof( VkMappedMemoryRange ), "struct and wrapper have different size!" 
); + + struct WriteDescriptorSet + { + WriteDescriptorSet( DescriptorSet dstSet_ = DescriptorSet(), + uint32_t dstBinding_ = 0, + uint32_t dstArrayElement_ = 0, + uint32_t descriptorCount_ = 0, + DescriptorType descriptorType_ = DescriptorType::eSampler, + const DescriptorImageInfo* pImageInfo_ = nullptr, + const DescriptorBufferInfo* pBufferInfo_ = nullptr, + const BufferView* pTexelBufferView_ = nullptr ) + : dstSet( dstSet_ ) + , dstBinding( dstBinding_ ) + , dstArrayElement( dstArrayElement_ ) + , descriptorCount( descriptorCount_ ) + , descriptorType( descriptorType_ ) + , pImageInfo( pImageInfo_ ) + , pBufferInfo( pBufferInfo_ ) + , pTexelBufferView( pTexelBufferView_ ) + { + } + + WriteDescriptorSet( VkWriteDescriptorSet const & rhs ) + { + memcpy( this, &rhs, sizeof( WriteDescriptorSet ) ); + } + + WriteDescriptorSet& operator=( VkWriteDescriptorSet const & rhs ) + { + memcpy( this, &rhs, sizeof( WriteDescriptorSet ) ); + return *this; + } + WriteDescriptorSet& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + WriteDescriptorSet& setDstSet( DescriptorSet dstSet_ ) + { + dstSet = dstSet_; + return *this; + } + + WriteDescriptorSet& setDstBinding( uint32_t dstBinding_ ) + { + dstBinding = dstBinding_; + return *this; + } + + WriteDescriptorSet& setDstArrayElement( uint32_t dstArrayElement_ ) + { + dstArrayElement = dstArrayElement_; + return *this; + } + + WriteDescriptorSet& setDescriptorCount( uint32_t descriptorCount_ ) + { + descriptorCount = descriptorCount_; + return *this; + } + + WriteDescriptorSet& setDescriptorType( DescriptorType descriptorType_ ) + { + descriptorType = descriptorType_; + return *this; + } + + WriteDescriptorSet& setPImageInfo( const DescriptorImageInfo* pImageInfo_ ) + { + pImageInfo = pImageInfo_; + return *this; + } + + WriteDescriptorSet& setPBufferInfo( const DescriptorBufferInfo* pBufferInfo_ ) + { + pBufferInfo = pBufferInfo_; + return *this; + } + + WriteDescriptorSet& setPTexelBufferView( const BufferView* pTexelBufferView_ ) + { + pTexelBufferView = pTexelBufferView_; + return *this; + } + + operator VkWriteDescriptorSet const&() const + { + return *reinterpret_cast(this); + } + + operator VkWriteDescriptorSet &() + { + return *reinterpret_cast(this); + } + + bool operator==( WriteDescriptorSet const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( dstSet == rhs.dstSet ) + && ( dstBinding == rhs.dstBinding ) + && ( dstArrayElement == rhs.dstArrayElement ) + && ( descriptorCount == rhs.descriptorCount ) + && ( descriptorType == rhs.descriptorType ) + && ( pImageInfo == rhs.pImageInfo ) + && ( pBufferInfo == rhs.pBufferInfo ) + && ( pTexelBufferView == rhs.pTexelBufferView ); + } + + bool operator!=( WriteDescriptorSet const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eWriteDescriptorSet; + + public: + const void* pNext = nullptr; + DescriptorSet dstSet; + uint32_t dstBinding; + uint32_t dstArrayElement; + uint32_t descriptorCount; + DescriptorType descriptorType; + const DescriptorImageInfo* pImageInfo; + const DescriptorBufferInfo* pBufferInfo; + const BufferView* pTexelBufferView; + }; + static_assert( sizeof( WriteDescriptorSet ) == sizeof( VkWriteDescriptorSet ), "struct and wrapper have different size!" 
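+  // Usage sketch (illustrative comment only): exactly one of pImageInfo,
+  // pBufferInfo or pTexelBufferView is read, depending on descriptorType.
+  // Writing a single uniform-buffer descriptor (dstSet and bufferInfo assumed
+  // to exist) could look like:
+  //
+  //   WriteDescriptorSet write = WriteDescriptorSet()
+  //       .setDstSet( dstSet )
+  //       .setDstBinding( 0 )
+  //       .setDescriptorCount( 1 )
+  //       .setDescriptorType( DescriptorType::eUniformBuffer )
+  //       .setPBufferInfo( &bufferInfo );
+  //   const VkWriteDescriptorSet & cWrite = write;
+  //   vkUpdateDescriptorSets( device, 1, &cWrite, 0, nullptr );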
); + + struct CopyDescriptorSet + { + CopyDescriptorSet( DescriptorSet srcSet_ = DescriptorSet(), + uint32_t srcBinding_ = 0, + uint32_t srcArrayElement_ = 0, + DescriptorSet dstSet_ = DescriptorSet(), + uint32_t dstBinding_ = 0, + uint32_t dstArrayElement_ = 0, + uint32_t descriptorCount_ = 0 ) + : srcSet( srcSet_ ) + , srcBinding( srcBinding_ ) + , srcArrayElement( srcArrayElement_ ) + , dstSet( dstSet_ ) + , dstBinding( dstBinding_ ) + , dstArrayElement( dstArrayElement_ ) + , descriptorCount( descriptorCount_ ) + { + } + + CopyDescriptorSet( VkCopyDescriptorSet const & rhs ) + { + memcpy( this, &rhs, sizeof( CopyDescriptorSet ) ); + } + + CopyDescriptorSet& operator=( VkCopyDescriptorSet const & rhs ) + { + memcpy( this, &rhs, sizeof( CopyDescriptorSet ) ); + return *this; + } + CopyDescriptorSet& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + CopyDescriptorSet& setSrcSet( DescriptorSet srcSet_ ) + { + srcSet = srcSet_; + return *this; + } + + CopyDescriptorSet& setSrcBinding( uint32_t srcBinding_ ) + { + srcBinding = srcBinding_; + return *this; + } + + CopyDescriptorSet& setSrcArrayElement( uint32_t srcArrayElement_ ) + { + srcArrayElement = srcArrayElement_; + return *this; + } + + CopyDescriptorSet& setDstSet( DescriptorSet dstSet_ ) + { + dstSet = dstSet_; + return *this; + } + + CopyDescriptorSet& setDstBinding( uint32_t dstBinding_ ) + { + dstBinding = dstBinding_; + return *this; + } + + CopyDescriptorSet& setDstArrayElement( uint32_t dstArrayElement_ ) + { + dstArrayElement = dstArrayElement_; + return *this; + } + + CopyDescriptorSet& setDescriptorCount( uint32_t descriptorCount_ ) + { + descriptorCount = descriptorCount_; + return *this; + } + + operator VkCopyDescriptorSet const&() const + { + return *reinterpret_cast(this); + } + + operator VkCopyDescriptorSet &() + { + return *reinterpret_cast(this); + } + + bool operator==( CopyDescriptorSet const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( srcSet == rhs.srcSet ) + && ( srcBinding == rhs.srcBinding ) + && ( srcArrayElement == rhs.srcArrayElement ) + && ( dstSet == rhs.dstSet ) + && ( dstBinding == rhs.dstBinding ) + && ( dstArrayElement == rhs.dstArrayElement ) + && ( descriptorCount == rhs.descriptorCount ); + } + + bool operator!=( CopyDescriptorSet const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eCopyDescriptorSet; + + public: + const void* pNext = nullptr; + DescriptorSet srcSet; + uint32_t srcBinding; + uint32_t srcArrayElement; + DescriptorSet dstSet; + uint32_t dstBinding; + uint32_t dstArrayElement; + uint32_t descriptorCount; + }; + static_assert( sizeof( CopyDescriptorSet ) == sizeof( VkCopyDescriptorSet ), "struct and wrapper have different size!" 
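+  // Note (illustrative comment only): CopyDescriptorSet mirrors
+  // WriteDescriptorSet but copies descriptorCount descriptors from an existing
+  // set instead of writing new data; it goes through the same
+  // vkUpdateDescriptorSets call, e.g.
+  //
+  //   CopyDescriptorSet copy( srcSet, /*srcBinding=*/0, /*srcArrayElement=*/0,
+  //                           dstSet, /*dstBinding=*/0, /*dstArrayElement=*/0,
+  //                           /*descriptorCount=*/1 );
+  //   const VkCopyDescriptorSet & cCopy = copy;
+  //   vkUpdateDescriptorSets( device, 0, nullptr, 1, &cCopy );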
); + + struct BufferViewCreateInfo + { + BufferViewCreateInfo( BufferViewCreateFlags flags_ = BufferViewCreateFlags(), + Buffer buffer_ = Buffer(), + Format format_ = Format::eUndefined, + DeviceSize offset_ = 0, + DeviceSize range_ = 0 ) + : flags( flags_ ) + , buffer( buffer_ ) + , format( format_ ) + , offset( offset_ ) + , range( range_ ) + { + } + + BufferViewCreateInfo( VkBufferViewCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( BufferViewCreateInfo ) ); + } + + BufferViewCreateInfo& operator=( VkBufferViewCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( BufferViewCreateInfo ) ); + return *this; + } + BufferViewCreateInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + BufferViewCreateInfo& setFlags( BufferViewCreateFlags flags_ ) + { + flags = flags_; + return *this; + } + + BufferViewCreateInfo& setBuffer( Buffer buffer_ ) + { + buffer = buffer_; + return *this; + } + + BufferViewCreateInfo& setFormat( Format format_ ) + { + format = format_; + return *this; + } + + BufferViewCreateInfo& setOffset( DeviceSize offset_ ) + { + offset = offset_; + return *this; + } + + BufferViewCreateInfo& setRange( DeviceSize range_ ) + { + range = range_; + return *this; + } + + operator VkBufferViewCreateInfo const&() const + { + return *reinterpret_cast(this); + } + + operator VkBufferViewCreateInfo &() + { + return *reinterpret_cast(this); + } + + bool operator==( BufferViewCreateInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( buffer == rhs.buffer ) + && ( format == rhs.format ) + && ( offset == rhs.offset ) + && ( range == rhs.range ); + } + + bool operator!=( BufferViewCreateInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eBufferViewCreateInfo; + + public: + const void* pNext = nullptr; + BufferViewCreateFlags flags; + Buffer buffer; + Format format; + DeviceSize offset; + DeviceSize range; + }; + static_assert( sizeof( BufferViewCreateInfo ) == sizeof( VkBufferViewCreateInfo ), "struct and wrapper have different size!" 
); + + struct ShaderModuleCreateInfo + { + ShaderModuleCreateInfo( ShaderModuleCreateFlags flags_ = ShaderModuleCreateFlags(), + size_t codeSize_ = 0, + const uint32_t* pCode_ = nullptr ) + : flags( flags_ ) + , codeSize( codeSize_ ) + , pCode( pCode_ ) + { + } + + ShaderModuleCreateInfo( VkShaderModuleCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( ShaderModuleCreateInfo ) ); + } + + ShaderModuleCreateInfo& operator=( VkShaderModuleCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( ShaderModuleCreateInfo ) ); + return *this; + } + ShaderModuleCreateInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + ShaderModuleCreateInfo& setFlags( ShaderModuleCreateFlags flags_ ) + { + flags = flags_; + return *this; + } + + ShaderModuleCreateInfo& setCodeSize( size_t codeSize_ ) + { + codeSize = codeSize_; + return *this; + } + + ShaderModuleCreateInfo& setPCode( const uint32_t* pCode_ ) + { + pCode = pCode_; + return *this; + } + + operator VkShaderModuleCreateInfo const&() const + { + return *reinterpret_cast(this); + } + + operator VkShaderModuleCreateInfo &() + { + return *reinterpret_cast(this); + } + + bool operator==( ShaderModuleCreateInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( codeSize == rhs.codeSize ) + && ( pCode == rhs.pCode ); + } + + bool operator!=( ShaderModuleCreateInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eShaderModuleCreateInfo; + + public: + const void* pNext = nullptr; + ShaderModuleCreateFlags flags; + size_t codeSize; + const uint32_t* pCode; + }; + static_assert( sizeof( ShaderModuleCreateInfo ) == sizeof( VkShaderModuleCreateInfo ), "struct and wrapper have different size!" 
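+  // Usage sketch (illustrative comment only): codeSize is given in bytes while
+  // pCode points at 32-bit SPIR-V words. Assuming spirv is a
+  // std::vector<uint32_t> holding the module's words:
+  //
+  //   ShaderModuleCreateInfo smci = ShaderModuleCreateInfo()
+  //       .setCodeSize( spirv.size() * sizeof( uint32_t ) )
+  //       .setPCode( spirv.data() );
+  //   const VkShaderModuleCreateInfo & cSmci = smci;
+  //   VkShaderModule module;
+  //   vkCreateShaderModule( device, &cSmci, nullptr, &module );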
); + + struct DescriptorSetAllocateInfo + { + DescriptorSetAllocateInfo( DescriptorPool descriptorPool_ = DescriptorPool(), + uint32_t descriptorSetCount_ = 0, + const DescriptorSetLayout* pSetLayouts_ = nullptr ) + : descriptorPool( descriptorPool_ ) + , descriptorSetCount( descriptorSetCount_ ) + , pSetLayouts( pSetLayouts_ ) + { + } + + DescriptorSetAllocateInfo( VkDescriptorSetAllocateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( DescriptorSetAllocateInfo ) ); + } + + DescriptorSetAllocateInfo& operator=( VkDescriptorSetAllocateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( DescriptorSetAllocateInfo ) ); + return *this; + } + DescriptorSetAllocateInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + DescriptorSetAllocateInfo& setDescriptorPool( DescriptorPool descriptorPool_ ) + { + descriptorPool = descriptorPool_; + return *this; + } + + DescriptorSetAllocateInfo& setDescriptorSetCount( uint32_t descriptorSetCount_ ) + { + descriptorSetCount = descriptorSetCount_; + return *this; + } + + DescriptorSetAllocateInfo& setPSetLayouts( const DescriptorSetLayout* pSetLayouts_ ) + { + pSetLayouts = pSetLayouts_; + return *this; + } + + operator VkDescriptorSetAllocateInfo const&() const + { + return *reinterpret_cast(this); + } + + operator VkDescriptorSetAllocateInfo &() + { + return *reinterpret_cast(this); + } + + bool operator==( DescriptorSetAllocateInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( descriptorPool == rhs.descriptorPool ) + && ( descriptorSetCount == rhs.descriptorSetCount ) + && ( pSetLayouts == rhs.pSetLayouts ); + } + + bool operator!=( DescriptorSetAllocateInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eDescriptorSetAllocateInfo; + + public: + const void* pNext = nullptr; + DescriptorPool descriptorPool; + uint32_t descriptorSetCount; + const DescriptorSetLayout* pSetLayouts; + }; + static_assert( sizeof( DescriptorSetAllocateInfo ) == sizeof( VkDescriptorSetAllocateInfo ), "struct and wrapper have different size!" 
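+  // Usage sketch (illustrative comment only): descriptorSetCount sets are
+  // allocated in one call, one per entry of pSetLayouts. Assuming pool and
+  // layout already exist:
+  //
+  //   DescriptorSetAllocateInfo dsai( pool, /*descriptorSetCount=*/1, &layout );
+  //   const VkDescriptorSetAllocateInfo & cDsai = dsai;
+  //   VkDescriptorSet set;
+  //   vkAllocateDescriptorSets( device, &cDsai, &set );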
); + + struct PipelineVertexInputStateCreateInfo + { + PipelineVertexInputStateCreateInfo( PipelineVertexInputStateCreateFlags flags_ = PipelineVertexInputStateCreateFlags(), + uint32_t vertexBindingDescriptionCount_ = 0, + const VertexInputBindingDescription* pVertexBindingDescriptions_ = nullptr, + uint32_t vertexAttributeDescriptionCount_ = 0, + const VertexInputAttributeDescription* pVertexAttributeDescriptions_ = nullptr ) + : flags( flags_ ) + , vertexBindingDescriptionCount( vertexBindingDescriptionCount_ ) + , pVertexBindingDescriptions( pVertexBindingDescriptions_ ) + , vertexAttributeDescriptionCount( vertexAttributeDescriptionCount_ ) + , pVertexAttributeDescriptions( pVertexAttributeDescriptions_ ) + { + } + + PipelineVertexInputStateCreateInfo( VkPipelineVertexInputStateCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineVertexInputStateCreateInfo ) ); + } + + PipelineVertexInputStateCreateInfo& operator=( VkPipelineVertexInputStateCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineVertexInputStateCreateInfo ) ); + return *this; + } + PipelineVertexInputStateCreateInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PipelineVertexInputStateCreateInfo& setFlags( PipelineVertexInputStateCreateFlags flags_ ) + { + flags = flags_; + return *this; + } + + PipelineVertexInputStateCreateInfo& setVertexBindingDescriptionCount( uint32_t vertexBindingDescriptionCount_ ) + { + vertexBindingDescriptionCount = vertexBindingDescriptionCount_; + return *this; + } + + PipelineVertexInputStateCreateInfo& setPVertexBindingDescriptions( const VertexInputBindingDescription* pVertexBindingDescriptions_ ) + { + pVertexBindingDescriptions = pVertexBindingDescriptions_; + return *this; + } + + PipelineVertexInputStateCreateInfo& setVertexAttributeDescriptionCount( uint32_t vertexAttributeDescriptionCount_ ) + { + vertexAttributeDescriptionCount = vertexAttributeDescriptionCount_; + return *this; + } + + PipelineVertexInputStateCreateInfo& setPVertexAttributeDescriptions( const VertexInputAttributeDescription* pVertexAttributeDescriptions_ ) + { + pVertexAttributeDescriptions = pVertexAttributeDescriptions_; + return *this; + } + + operator VkPipelineVertexInputStateCreateInfo const&() const + { + return *reinterpret_cast(this); + } + + operator VkPipelineVertexInputStateCreateInfo &() + { + return *reinterpret_cast(this); + } + + bool operator==( PipelineVertexInputStateCreateInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( vertexBindingDescriptionCount == rhs.vertexBindingDescriptionCount ) + && ( pVertexBindingDescriptions == rhs.pVertexBindingDescriptions ) + && ( vertexAttributeDescriptionCount == rhs.vertexAttributeDescriptionCount ) + && ( pVertexAttributeDescriptions == rhs.pVertexAttributeDescriptions ); + } + + bool operator!=( PipelineVertexInputStateCreateInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePipelineVertexInputStateCreateInfo; + + public: + const void* pNext = nullptr; + PipelineVertexInputStateCreateFlags flags; + uint32_t vertexBindingDescriptionCount; + const VertexInputBindingDescription* pVertexBindingDescriptions; + uint32_t vertexAttributeDescriptionCount; + const VertexInputAttributeDescription* pVertexAttributeDescriptions; + }; + static_assert( sizeof( PipelineVertexInputStateCreateInfo ) == sizeof( VkPipelineVertexInputStateCreateInfo ), "struct and wrapper have 
different size!" ); + + struct PipelineInputAssemblyStateCreateInfo + { + PipelineInputAssemblyStateCreateInfo( PipelineInputAssemblyStateCreateFlags flags_ = PipelineInputAssemblyStateCreateFlags(), + PrimitiveTopology topology_ = PrimitiveTopology::ePointList, + Bool32 primitiveRestartEnable_ = 0 ) + : flags( flags_ ) + , topology( topology_ ) + , primitiveRestartEnable( primitiveRestartEnable_ ) + { + } + + PipelineInputAssemblyStateCreateInfo( VkPipelineInputAssemblyStateCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineInputAssemblyStateCreateInfo ) ); + } + + PipelineInputAssemblyStateCreateInfo& operator=( VkPipelineInputAssemblyStateCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineInputAssemblyStateCreateInfo ) ); + return *this; + } + PipelineInputAssemblyStateCreateInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PipelineInputAssemblyStateCreateInfo& setFlags( PipelineInputAssemblyStateCreateFlags flags_ ) + { + flags = flags_; + return *this; + } + + PipelineInputAssemblyStateCreateInfo& setTopology( PrimitiveTopology topology_ ) + { + topology = topology_; + return *this; + } + + PipelineInputAssemblyStateCreateInfo& setPrimitiveRestartEnable( Bool32 primitiveRestartEnable_ ) + { + primitiveRestartEnable = primitiveRestartEnable_; + return *this; + } + + operator VkPipelineInputAssemblyStateCreateInfo const&() const + { + return *reinterpret_cast(this); + } + + operator VkPipelineInputAssemblyStateCreateInfo &() + { + return *reinterpret_cast(this); + } + + bool operator==( PipelineInputAssemblyStateCreateInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( topology == rhs.topology ) + && ( primitiveRestartEnable == rhs.primitiveRestartEnable ); + } + + bool operator!=( PipelineInputAssemblyStateCreateInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePipelineInputAssemblyStateCreateInfo; + + public: + const void* pNext = nullptr; + PipelineInputAssemblyStateCreateFlags flags; + PrimitiveTopology topology; + Bool32 primitiveRestartEnable; + }; + static_assert( sizeof( PipelineInputAssemblyStateCreateInfo ) == sizeof( VkPipelineInputAssemblyStateCreateInfo ), "struct and wrapper have different size!" 
);
+
+  struct PipelineTessellationStateCreateInfo
+  {
+    PipelineTessellationStateCreateInfo( PipelineTessellationStateCreateFlags flags_ = PipelineTessellationStateCreateFlags(),
+                                         uint32_t patchControlPoints_ = 0 )
+      : flags( flags_ )
+      , patchControlPoints( patchControlPoints_ )
+    {
+    }
+
+    PipelineTessellationStateCreateInfo( VkPipelineTessellationStateCreateInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( PipelineTessellationStateCreateInfo ) );
+    }
+
+    PipelineTessellationStateCreateInfo& operator=( VkPipelineTessellationStateCreateInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( PipelineTessellationStateCreateInfo ) );
+      return *this;
+    }
+    PipelineTessellationStateCreateInfo& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineTessellationStateCreateInfo& setFlags( PipelineTessellationStateCreateFlags flags_ )
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    PipelineTessellationStateCreateInfo& setPatchControlPoints( uint32_t patchControlPoints_ )
+    {
+      patchControlPoints = patchControlPoints_;
+      return *this;
+    }
+
+    operator VkPipelineTessellationStateCreateInfo const&() const
+    {
+      return *reinterpret_cast<const VkPipelineTessellationStateCreateInfo*>(this);
+    }
+
+    operator VkPipelineTessellationStateCreateInfo &()
+    {
+      return *reinterpret_cast<VkPipelineTessellationStateCreateInfo*>(this);
+    }
+
+    bool operator==( PipelineTessellationStateCreateInfo const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( patchControlPoints == rhs.patchControlPoints );
+    }
+
+    bool operator!=( PipelineTessellationStateCreateInfo const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType = StructureType::ePipelineTessellationStateCreateInfo;
+
+  public:
+    const void* pNext = nullptr;
+    PipelineTessellationStateCreateFlags flags;
+    uint32_t patchControlPoints;
+  };
+  static_assert( sizeof( PipelineTessellationStateCreateInfo ) == sizeof( VkPipelineTessellationStateCreateInfo ), "struct and wrapper have different size!"
); + + struct PipelineViewportStateCreateInfo + { + PipelineViewportStateCreateInfo( PipelineViewportStateCreateFlags flags_ = PipelineViewportStateCreateFlags(), + uint32_t viewportCount_ = 0, + const Viewport* pViewports_ = nullptr, + uint32_t scissorCount_ = 0, + const Rect2D* pScissors_ = nullptr ) + : flags( flags_ ) + , viewportCount( viewportCount_ ) + , pViewports( pViewports_ ) + , scissorCount( scissorCount_ ) + , pScissors( pScissors_ ) + { + } + + PipelineViewportStateCreateInfo( VkPipelineViewportStateCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineViewportStateCreateInfo ) ); + } + + PipelineViewportStateCreateInfo& operator=( VkPipelineViewportStateCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineViewportStateCreateInfo ) ); + return *this; + } + PipelineViewportStateCreateInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PipelineViewportStateCreateInfo& setFlags( PipelineViewportStateCreateFlags flags_ ) + { + flags = flags_; + return *this; + } + + PipelineViewportStateCreateInfo& setViewportCount( uint32_t viewportCount_ ) + { + viewportCount = viewportCount_; + return *this; + } + + PipelineViewportStateCreateInfo& setPViewports( const Viewport* pViewports_ ) + { + pViewports = pViewports_; + return *this; + } + + PipelineViewportStateCreateInfo& setScissorCount( uint32_t scissorCount_ ) + { + scissorCount = scissorCount_; + return *this; + } + + PipelineViewportStateCreateInfo& setPScissors( const Rect2D* pScissors_ ) + { + pScissors = pScissors_; + return *this; + } + + operator VkPipelineViewportStateCreateInfo const&() const + { + return *reinterpret_cast(this); + } + + operator VkPipelineViewportStateCreateInfo &() + { + return *reinterpret_cast(this); + } + + bool operator==( PipelineViewportStateCreateInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( viewportCount == rhs.viewportCount ) + && ( pViewports == rhs.pViewports ) + && ( scissorCount == rhs.scissorCount ) + && ( pScissors == rhs.pScissors ); + } + + bool operator!=( PipelineViewportStateCreateInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePipelineViewportStateCreateInfo; + + public: + const void* pNext = nullptr; + PipelineViewportStateCreateFlags flags; + uint32_t viewportCount; + const Viewport* pViewports; + uint32_t scissorCount; + const Rect2D* pScissors; + }; + static_assert( sizeof( PipelineViewportStateCreateInfo ) == sizeof( VkPipelineViewportStateCreateInfo ), "struct and wrapper have different size!" 
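+  // Usage sketch (illustrative comment only): viewportCount and scissorCount
+  // normally match, and the pointers may stay null when the corresponding state
+  // is declared dynamic. A single static viewport and scissor (both assumed to
+  // be filled in already):
+  //
+  //   PipelineViewportStateCreateInfo viewportState = PipelineViewportStateCreateInfo()
+  //       .setViewportCount( 1 )
+  //       .setPViewports( &viewport )
+  //       .setScissorCount( 1 )
+  //       .setPScissors( &scissor );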
); + + struct PipelineRasterizationStateCreateInfo + { + PipelineRasterizationStateCreateInfo( PipelineRasterizationStateCreateFlags flags_ = PipelineRasterizationStateCreateFlags(), + Bool32 depthClampEnable_ = 0, + Bool32 rasterizerDiscardEnable_ = 0, + PolygonMode polygonMode_ = PolygonMode::eFill, + CullModeFlags cullMode_ = CullModeFlags(), + FrontFace frontFace_ = FrontFace::eCounterClockwise, + Bool32 depthBiasEnable_ = 0, + float depthBiasConstantFactor_ = 0, + float depthBiasClamp_ = 0, + float depthBiasSlopeFactor_ = 0, + float lineWidth_ = 0 ) + : flags( flags_ ) + , depthClampEnable( depthClampEnable_ ) + , rasterizerDiscardEnable( rasterizerDiscardEnable_ ) + , polygonMode( polygonMode_ ) + , cullMode( cullMode_ ) + , frontFace( frontFace_ ) + , depthBiasEnable( depthBiasEnable_ ) + , depthBiasConstantFactor( depthBiasConstantFactor_ ) + , depthBiasClamp( depthBiasClamp_ ) + , depthBiasSlopeFactor( depthBiasSlopeFactor_ ) + , lineWidth( lineWidth_ ) + { + } + + PipelineRasterizationStateCreateInfo( VkPipelineRasterizationStateCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineRasterizationStateCreateInfo ) ); + } + + PipelineRasterizationStateCreateInfo& operator=( VkPipelineRasterizationStateCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineRasterizationStateCreateInfo ) ); + return *this; + } + PipelineRasterizationStateCreateInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PipelineRasterizationStateCreateInfo& setFlags( PipelineRasterizationStateCreateFlags flags_ ) + { + flags = flags_; + return *this; + } + + PipelineRasterizationStateCreateInfo& setDepthClampEnable( Bool32 depthClampEnable_ ) + { + depthClampEnable = depthClampEnable_; + return *this; + } + + PipelineRasterizationStateCreateInfo& setRasterizerDiscardEnable( Bool32 rasterizerDiscardEnable_ ) + { + rasterizerDiscardEnable = rasterizerDiscardEnable_; + return *this; + } + + PipelineRasterizationStateCreateInfo& setPolygonMode( PolygonMode polygonMode_ ) + { + polygonMode = polygonMode_; + return *this; + } + + PipelineRasterizationStateCreateInfo& setCullMode( CullModeFlags cullMode_ ) + { + cullMode = cullMode_; + return *this; + } + + PipelineRasterizationStateCreateInfo& setFrontFace( FrontFace frontFace_ ) + { + frontFace = frontFace_; + return *this; + } + + PipelineRasterizationStateCreateInfo& setDepthBiasEnable( Bool32 depthBiasEnable_ ) + { + depthBiasEnable = depthBiasEnable_; + return *this; + } + + PipelineRasterizationStateCreateInfo& setDepthBiasConstantFactor( float depthBiasConstantFactor_ ) + { + depthBiasConstantFactor = depthBiasConstantFactor_; + return *this; + } + + PipelineRasterizationStateCreateInfo& setDepthBiasClamp( float depthBiasClamp_ ) + { + depthBiasClamp = depthBiasClamp_; + return *this; + } + + PipelineRasterizationStateCreateInfo& setDepthBiasSlopeFactor( float depthBiasSlopeFactor_ ) + { + depthBiasSlopeFactor = depthBiasSlopeFactor_; + return *this; + } + + PipelineRasterizationStateCreateInfo& setLineWidth( float lineWidth_ ) + { + lineWidth = lineWidth_; + return *this; + } + + operator VkPipelineRasterizationStateCreateInfo const&() const + { + return *reinterpret_cast(this); + } + + operator VkPipelineRasterizationStateCreateInfo &() + { + return *reinterpret_cast(this); + } + + bool operator==( PipelineRasterizationStateCreateInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( depthClampEnable == rhs.depthClampEnable ) + && ( 
rasterizerDiscardEnable == rhs.rasterizerDiscardEnable ) + && ( polygonMode == rhs.polygonMode ) + && ( cullMode == rhs.cullMode ) + && ( frontFace == rhs.frontFace ) + && ( depthBiasEnable == rhs.depthBiasEnable ) + && ( depthBiasConstantFactor == rhs.depthBiasConstantFactor ) + && ( depthBiasClamp == rhs.depthBiasClamp ) + && ( depthBiasSlopeFactor == rhs.depthBiasSlopeFactor ) + && ( lineWidth == rhs.lineWidth ); + } + + bool operator!=( PipelineRasterizationStateCreateInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePipelineRasterizationStateCreateInfo; + + public: + const void* pNext = nullptr; + PipelineRasterizationStateCreateFlags flags; + Bool32 depthClampEnable; + Bool32 rasterizerDiscardEnable; + PolygonMode polygonMode; + CullModeFlags cullMode; + FrontFace frontFace; + Bool32 depthBiasEnable; + float depthBiasConstantFactor; + float depthBiasClamp; + float depthBiasSlopeFactor; + float lineWidth; + }; + static_assert( sizeof( PipelineRasterizationStateCreateInfo ) == sizeof( VkPipelineRasterizationStateCreateInfo ), "struct and wrapper have different size!" ); + + struct PipelineDepthStencilStateCreateInfo + { + PipelineDepthStencilStateCreateInfo( PipelineDepthStencilStateCreateFlags flags_ = PipelineDepthStencilStateCreateFlags(), + Bool32 depthTestEnable_ = 0, + Bool32 depthWriteEnable_ = 0, + CompareOp depthCompareOp_ = CompareOp::eNever, + Bool32 depthBoundsTestEnable_ = 0, + Bool32 stencilTestEnable_ = 0, + StencilOpState front_ = StencilOpState(), + StencilOpState back_ = StencilOpState(), + float minDepthBounds_ = 0, + float maxDepthBounds_ = 0 ) + : flags( flags_ ) + , depthTestEnable( depthTestEnable_ ) + , depthWriteEnable( depthWriteEnable_ ) + , depthCompareOp( depthCompareOp_ ) + , depthBoundsTestEnable( depthBoundsTestEnable_ ) + , stencilTestEnable( stencilTestEnable_ ) + , front( front_ ) + , back( back_ ) + , minDepthBounds( minDepthBounds_ ) + , maxDepthBounds( maxDepthBounds_ ) + { + } + + PipelineDepthStencilStateCreateInfo( VkPipelineDepthStencilStateCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineDepthStencilStateCreateInfo ) ); + } + + PipelineDepthStencilStateCreateInfo& operator=( VkPipelineDepthStencilStateCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineDepthStencilStateCreateInfo ) ); + return *this; + } + PipelineDepthStencilStateCreateInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PipelineDepthStencilStateCreateInfo& setFlags( PipelineDepthStencilStateCreateFlags flags_ ) + { + flags = flags_; + return *this; + } + + PipelineDepthStencilStateCreateInfo& setDepthTestEnable( Bool32 depthTestEnable_ ) + { + depthTestEnable = depthTestEnable_; + return *this; + } + + PipelineDepthStencilStateCreateInfo& setDepthWriteEnable( Bool32 depthWriteEnable_ ) + { + depthWriteEnable = depthWriteEnable_; + return *this; + } + + PipelineDepthStencilStateCreateInfo& setDepthCompareOp( CompareOp depthCompareOp_ ) + { + depthCompareOp = depthCompareOp_; + return *this; + } + + PipelineDepthStencilStateCreateInfo& setDepthBoundsTestEnable( Bool32 depthBoundsTestEnable_ ) + { + depthBoundsTestEnable = depthBoundsTestEnable_; + return *this; + } + + PipelineDepthStencilStateCreateInfo& setStencilTestEnable( Bool32 stencilTestEnable_ ) + { + stencilTestEnable = stencilTestEnable_; + return *this; + } + + PipelineDepthStencilStateCreateInfo& setFront( StencilOpState front_ ) + { + front = front_; + return *this; + } + + 
PipelineDepthStencilStateCreateInfo& setBack( StencilOpState back_ ) + { + back = back_; + return *this; + } + + PipelineDepthStencilStateCreateInfo& setMinDepthBounds( float minDepthBounds_ ) + { + minDepthBounds = minDepthBounds_; + return *this; + } + + PipelineDepthStencilStateCreateInfo& setMaxDepthBounds( float maxDepthBounds_ ) + { + maxDepthBounds = maxDepthBounds_; + return *this; + } + + operator VkPipelineDepthStencilStateCreateInfo const&() const + { + return *reinterpret_cast(this); + } + + operator VkPipelineDepthStencilStateCreateInfo &() + { + return *reinterpret_cast(this); + } + + bool operator==( PipelineDepthStencilStateCreateInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( depthTestEnable == rhs.depthTestEnable ) + && ( depthWriteEnable == rhs.depthWriteEnable ) + && ( depthCompareOp == rhs.depthCompareOp ) + && ( depthBoundsTestEnable == rhs.depthBoundsTestEnable ) + && ( stencilTestEnable == rhs.stencilTestEnable ) + && ( front == rhs.front ) + && ( back == rhs.back ) + && ( minDepthBounds == rhs.minDepthBounds ) + && ( maxDepthBounds == rhs.maxDepthBounds ); + } + + bool operator!=( PipelineDepthStencilStateCreateInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePipelineDepthStencilStateCreateInfo; + + public: + const void* pNext = nullptr; + PipelineDepthStencilStateCreateFlags flags; + Bool32 depthTestEnable; + Bool32 depthWriteEnable; + CompareOp depthCompareOp; + Bool32 depthBoundsTestEnable; + Bool32 stencilTestEnable; + StencilOpState front; + StencilOpState back; + float minDepthBounds; + float maxDepthBounds; + }; + static_assert( sizeof( PipelineDepthStencilStateCreateInfo ) == sizeof( VkPipelineDepthStencilStateCreateInfo ), "struct and wrapper have different size!" 
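+  // Usage sketch (illustrative comment only): a common configuration enables the
+  // depth test and depth writes with a "less" comparison and leaves the stencil
+  // test disabled:
+  //
+  //   PipelineDepthStencilStateCreateInfo depthStencil = PipelineDepthStencilStateCreateInfo()
+  //       .setDepthTestEnable( VK_TRUE )
+  //       .setDepthWriteEnable( VK_TRUE )
+  //       .setDepthCompareOp( CompareOp::eLess );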
); + + struct PipelineCacheCreateInfo + { + PipelineCacheCreateInfo( PipelineCacheCreateFlags flags_ = PipelineCacheCreateFlags(), + size_t initialDataSize_ = 0, + const void* pInitialData_ = nullptr ) + : flags( flags_ ) + , initialDataSize( initialDataSize_ ) + , pInitialData( pInitialData_ ) + { + } + + PipelineCacheCreateInfo( VkPipelineCacheCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineCacheCreateInfo ) ); + } + + PipelineCacheCreateInfo& operator=( VkPipelineCacheCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineCacheCreateInfo ) ); + return *this; + } + PipelineCacheCreateInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PipelineCacheCreateInfo& setFlags( PipelineCacheCreateFlags flags_ ) + { + flags = flags_; + return *this; + } + + PipelineCacheCreateInfo& setInitialDataSize( size_t initialDataSize_ ) + { + initialDataSize = initialDataSize_; + return *this; + } + + PipelineCacheCreateInfo& setPInitialData( const void* pInitialData_ ) + { + pInitialData = pInitialData_; + return *this; + } + + operator VkPipelineCacheCreateInfo const&() const + { + return *reinterpret_cast(this); + } + + operator VkPipelineCacheCreateInfo &() + { + return *reinterpret_cast(this); + } + + bool operator==( PipelineCacheCreateInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( initialDataSize == rhs.initialDataSize ) + && ( pInitialData == rhs.pInitialData ); + } + + bool operator!=( PipelineCacheCreateInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePipelineCacheCreateInfo; + + public: + const void* pNext = nullptr; + PipelineCacheCreateFlags flags; + size_t initialDataSize; + const void* pInitialData; + }; + static_assert( sizeof( PipelineCacheCreateInfo ) == sizeof( VkPipelineCacheCreateInfo ), "struct and wrapper have different size!" 
); + + struct SamplerCreateInfo + { + SamplerCreateInfo( SamplerCreateFlags flags_ = SamplerCreateFlags(), + Filter magFilter_ = Filter::eNearest, + Filter minFilter_ = Filter::eNearest, + SamplerMipmapMode mipmapMode_ = SamplerMipmapMode::eNearest, + SamplerAddressMode addressModeU_ = SamplerAddressMode::eRepeat, + SamplerAddressMode addressModeV_ = SamplerAddressMode::eRepeat, + SamplerAddressMode addressModeW_ = SamplerAddressMode::eRepeat, + float mipLodBias_ = 0, + Bool32 anisotropyEnable_ = 0, + float maxAnisotropy_ = 0, + Bool32 compareEnable_ = 0, + CompareOp compareOp_ = CompareOp::eNever, + float minLod_ = 0, + float maxLod_ = 0, + BorderColor borderColor_ = BorderColor::eFloatTransparentBlack, + Bool32 unnormalizedCoordinates_ = 0 ) + : flags( flags_ ) + , magFilter( magFilter_ ) + , minFilter( minFilter_ ) + , mipmapMode( mipmapMode_ ) + , addressModeU( addressModeU_ ) + , addressModeV( addressModeV_ ) + , addressModeW( addressModeW_ ) + , mipLodBias( mipLodBias_ ) + , anisotropyEnable( anisotropyEnable_ ) + , maxAnisotropy( maxAnisotropy_ ) + , compareEnable( compareEnable_ ) + , compareOp( compareOp_ ) + , minLod( minLod_ ) + , maxLod( maxLod_ ) + , borderColor( borderColor_ ) + , unnormalizedCoordinates( unnormalizedCoordinates_ ) + { + } + + SamplerCreateInfo( VkSamplerCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( SamplerCreateInfo ) ); + } + + SamplerCreateInfo& operator=( VkSamplerCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( SamplerCreateInfo ) ); + return *this; + } + SamplerCreateInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + SamplerCreateInfo& setFlags( SamplerCreateFlags flags_ ) + { + flags = flags_; + return *this; + } + + SamplerCreateInfo& setMagFilter( Filter magFilter_ ) + { + magFilter = magFilter_; + return *this; + } + + SamplerCreateInfo& setMinFilter( Filter minFilter_ ) + { + minFilter = minFilter_; + return *this; + } + + SamplerCreateInfo& setMipmapMode( SamplerMipmapMode mipmapMode_ ) + { + mipmapMode = mipmapMode_; + return *this; + } + + SamplerCreateInfo& setAddressModeU( SamplerAddressMode addressModeU_ ) + { + addressModeU = addressModeU_; + return *this; + } + + SamplerCreateInfo& setAddressModeV( SamplerAddressMode addressModeV_ ) + { + addressModeV = addressModeV_; + return *this; + } + + SamplerCreateInfo& setAddressModeW( SamplerAddressMode addressModeW_ ) + { + addressModeW = addressModeW_; + return *this; + } + + SamplerCreateInfo& setMipLodBias( float mipLodBias_ ) + { + mipLodBias = mipLodBias_; + return *this; + } + + SamplerCreateInfo& setAnisotropyEnable( Bool32 anisotropyEnable_ ) + { + anisotropyEnable = anisotropyEnable_; + return *this; + } + + SamplerCreateInfo& setMaxAnisotropy( float maxAnisotropy_ ) + { + maxAnisotropy = maxAnisotropy_; + return *this; + } + + SamplerCreateInfo& setCompareEnable( Bool32 compareEnable_ ) + { + compareEnable = compareEnable_; + return *this; + } + + SamplerCreateInfo& setCompareOp( CompareOp compareOp_ ) + { + compareOp = compareOp_; + return *this; + } + + SamplerCreateInfo& setMinLod( float minLod_ ) + { + minLod = minLod_; + return *this; + } + + SamplerCreateInfo& setMaxLod( float maxLod_ ) + { + maxLod = maxLod_; + return *this; + } + + SamplerCreateInfo& setBorderColor( BorderColor borderColor_ ) + { + borderColor = borderColor_; + return *this; + } + + SamplerCreateInfo& setUnnormalizedCoordinates( Bool32 unnormalizedCoordinates_ ) + { + unnormalizedCoordinates = unnormalizedCoordinates_; + return *this; + } + + operator 
VkSamplerCreateInfo const&() const + { + return *reinterpret_cast(this); + } + + operator VkSamplerCreateInfo &() + { + return *reinterpret_cast(this); + } + + bool operator==( SamplerCreateInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( magFilter == rhs.magFilter ) + && ( minFilter == rhs.minFilter ) + && ( mipmapMode == rhs.mipmapMode ) + && ( addressModeU == rhs.addressModeU ) + && ( addressModeV == rhs.addressModeV ) + && ( addressModeW == rhs.addressModeW ) + && ( mipLodBias == rhs.mipLodBias ) + && ( anisotropyEnable == rhs.anisotropyEnable ) + && ( maxAnisotropy == rhs.maxAnisotropy ) + && ( compareEnable == rhs.compareEnable ) + && ( compareOp == rhs.compareOp ) + && ( minLod == rhs.minLod ) + && ( maxLod == rhs.maxLod ) + && ( borderColor == rhs.borderColor ) + && ( unnormalizedCoordinates == rhs.unnormalizedCoordinates ); + } + + bool operator!=( SamplerCreateInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eSamplerCreateInfo; + + public: + const void* pNext = nullptr; + SamplerCreateFlags flags; + Filter magFilter; + Filter minFilter; + SamplerMipmapMode mipmapMode; + SamplerAddressMode addressModeU; + SamplerAddressMode addressModeV; + SamplerAddressMode addressModeW; + float mipLodBias; + Bool32 anisotropyEnable; + float maxAnisotropy; + Bool32 compareEnable; + CompareOp compareOp; + float minLod; + float maxLod; + BorderColor borderColor; + Bool32 unnormalizedCoordinates; + }; + static_assert( sizeof( SamplerCreateInfo ) == sizeof( VkSamplerCreateInfo ), "struct and wrapper have different size!" ); + + struct CommandBufferAllocateInfo + { + CommandBufferAllocateInfo( CommandPool commandPool_ = CommandPool(), + CommandBufferLevel level_ = CommandBufferLevel::ePrimary, + uint32_t commandBufferCount_ = 0 ) + : commandPool( commandPool_ ) + , level( level_ ) + , commandBufferCount( commandBufferCount_ ) + { + } + + CommandBufferAllocateInfo( VkCommandBufferAllocateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( CommandBufferAllocateInfo ) ); + } + + CommandBufferAllocateInfo& operator=( VkCommandBufferAllocateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( CommandBufferAllocateInfo ) ); + return *this; + } + CommandBufferAllocateInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + CommandBufferAllocateInfo& setCommandPool( CommandPool commandPool_ ) + { + commandPool = commandPool_; + return *this; + } + + CommandBufferAllocateInfo& setLevel( CommandBufferLevel level_ ) + { + level = level_; + return *this; + } + + CommandBufferAllocateInfo& setCommandBufferCount( uint32_t commandBufferCount_ ) + { + commandBufferCount = commandBufferCount_; + return *this; + } + + operator VkCommandBufferAllocateInfo const&() const + { + return *reinterpret_cast(this); + } + + operator VkCommandBufferAllocateInfo &() + { + return *reinterpret_cast(this); + } + + bool operator==( CommandBufferAllocateInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( commandPool == rhs.commandPool ) + && ( level == rhs.level ) + && ( commandBufferCount == rhs.commandBufferCount ); + } + + bool operator!=( CommandBufferAllocateInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eCommandBufferAllocateInfo; + + public: + const void* pNext = nullptr; + CommandPool commandPool; + CommandBufferLevel level; + uint32_t commandBufferCount; + 
}; + static_assert( sizeof( CommandBufferAllocateInfo ) == sizeof( VkCommandBufferAllocateInfo ), "struct and wrapper have different size!" ); + + struct RenderPassBeginInfo + { + RenderPassBeginInfo( RenderPass renderPass_ = RenderPass(), + Framebuffer framebuffer_ = Framebuffer(), + Rect2D renderArea_ = Rect2D(), + uint32_t clearValueCount_ = 0, + const ClearValue* pClearValues_ = nullptr ) + : renderPass( renderPass_ ) + , framebuffer( framebuffer_ ) + , renderArea( renderArea_ ) + , clearValueCount( clearValueCount_ ) + , pClearValues( pClearValues_ ) + { + } + + RenderPassBeginInfo( VkRenderPassBeginInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( RenderPassBeginInfo ) ); + } + + RenderPassBeginInfo& operator=( VkRenderPassBeginInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( RenderPassBeginInfo ) ); + return *this; + } + RenderPassBeginInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + RenderPassBeginInfo& setRenderPass( RenderPass renderPass_ ) + { + renderPass = renderPass_; + return *this; + } + + RenderPassBeginInfo& setFramebuffer( Framebuffer framebuffer_ ) + { + framebuffer = framebuffer_; + return *this; + } + + RenderPassBeginInfo& setRenderArea( Rect2D renderArea_ ) + { + renderArea = renderArea_; + return *this; + } + + RenderPassBeginInfo& setClearValueCount( uint32_t clearValueCount_ ) + { + clearValueCount = clearValueCount_; + return *this; + } + + RenderPassBeginInfo& setPClearValues( const ClearValue* pClearValues_ ) + { + pClearValues = pClearValues_; + return *this; + } + + operator VkRenderPassBeginInfo const&() const + { + return *reinterpret_cast(this); + } + + operator VkRenderPassBeginInfo &() + { + return *reinterpret_cast(this); + } + + bool operator==( RenderPassBeginInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( renderPass == rhs.renderPass ) + && ( framebuffer == rhs.framebuffer ) + && ( renderArea == rhs.renderArea ) + && ( clearValueCount == rhs.clearValueCount ) + && ( pClearValues == rhs.pClearValues ); + } + + bool operator!=( RenderPassBeginInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eRenderPassBeginInfo; + + public: + const void* pNext = nullptr; + RenderPass renderPass; + Framebuffer framebuffer; + Rect2D renderArea; + uint32_t clearValueCount; + const ClearValue* pClearValues; + }; + static_assert( sizeof( RenderPassBeginInfo ) == sizeof( VkRenderPassBeginInfo ), "struct and wrapper have different size!" 
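+  // Usage sketch (illustrative comment only): RenderPassBeginInfo ties a render
+  // pass to a compatible framebuffer, the area to render into, and clear values
+  // for the attachments that use a clear load op. Assuming the handles, extent
+  // and clear-value array already exist:
+  //
+  //   RenderPassBeginInfo beginInfo( renderPass, framebuffer,
+  //                                  Rect2D( Offset2D( 0, 0 ), Extent2D( width, height ) ),
+  //                                  clearValueCount, clearValues );
+  //   const VkRenderPassBeginInfo & cBeginInfo = beginInfo;
+  //   vkCmdBeginRenderPass( commandBuffer, &cBeginInfo, VK_SUBPASS_CONTENTS_INLINE );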
); + + struct EventCreateInfo + { + EventCreateInfo( EventCreateFlags flags_ = EventCreateFlags() ) + : flags( flags_ ) + { + } + + EventCreateInfo( VkEventCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( EventCreateInfo ) ); + } + + EventCreateInfo& operator=( VkEventCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( EventCreateInfo ) ); + return *this; + } + EventCreateInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + EventCreateInfo& setFlags( EventCreateFlags flags_ ) + { + flags = flags_; + return *this; + } + + operator VkEventCreateInfo const&() const + { + return *reinterpret_cast(this); + } + + operator VkEventCreateInfo &() + { + return *reinterpret_cast(this); + } + + bool operator==( EventCreateInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ); + } + + bool operator!=( EventCreateInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eEventCreateInfo; + + public: + const void* pNext = nullptr; + EventCreateFlags flags; + }; + static_assert( sizeof( EventCreateInfo ) == sizeof( VkEventCreateInfo ), "struct and wrapper have different size!" ); + + struct SemaphoreCreateInfo + { + SemaphoreCreateInfo( SemaphoreCreateFlags flags_ = SemaphoreCreateFlags() ) + : flags( flags_ ) + { + } + + SemaphoreCreateInfo( VkSemaphoreCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( SemaphoreCreateInfo ) ); + } + + SemaphoreCreateInfo& operator=( VkSemaphoreCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( SemaphoreCreateInfo ) ); + return *this; + } + SemaphoreCreateInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + SemaphoreCreateInfo& setFlags( SemaphoreCreateFlags flags_ ) + { + flags = flags_; + return *this; + } + + operator VkSemaphoreCreateInfo const&() const + { + return *reinterpret_cast(this); + } + + operator VkSemaphoreCreateInfo &() + { + return *reinterpret_cast(this); + } + + bool operator==( SemaphoreCreateInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ); + } + + bool operator!=( SemaphoreCreateInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eSemaphoreCreateInfo; + + public: + const void* pNext = nullptr; + SemaphoreCreateFlags flags; + }; + static_assert( sizeof( SemaphoreCreateInfo ) == sizeof( VkSemaphoreCreateInfo ), "struct and wrapper have different size!" 
); + + struct FramebufferCreateInfo + { + FramebufferCreateInfo( FramebufferCreateFlags flags_ = FramebufferCreateFlags(), + RenderPass renderPass_ = RenderPass(), + uint32_t attachmentCount_ = 0, + const ImageView* pAttachments_ = nullptr, + uint32_t width_ = 0, + uint32_t height_ = 0, + uint32_t layers_ = 0 ) + : flags( flags_ ) + , renderPass( renderPass_ ) + , attachmentCount( attachmentCount_ ) + , pAttachments( pAttachments_ ) + , width( width_ ) + , height( height_ ) + , layers( layers_ ) + { + } + + FramebufferCreateInfo( VkFramebufferCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( FramebufferCreateInfo ) ); + } + + FramebufferCreateInfo& operator=( VkFramebufferCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( FramebufferCreateInfo ) ); + return *this; + } + FramebufferCreateInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + FramebufferCreateInfo& setFlags( FramebufferCreateFlags flags_ ) + { + flags = flags_; + return *this; + } + + FramebufferCreateInfo& setRenderPass( RenderPass renderPass_ ) + { + renderPass = renderPass_; + return *this; + } + + FramebufferCreateInfo& setAttachmentCount( uint32_t attachmentCount_ ) + { + attachmentCount = attachmentCount_; + return *this; + } + + FramebufferCreateInfo& setPAttachments( const ImageView* pAttachments_ ) + { + pAttachments = pAttachments_; + return *this; + } + + FramebufferCreateInfo& setWidth( uint32_t width_ ) + { + width = width_; + return *this; + } + + FramebufferCreateInfo& setHeight( uint32_t height_ ) + { + height = height_; + return *this; + } + + FramebufferCreateInfo& setLayers( uint32_t layers_ ) + { + layers = layers_; + return *this; + } + + operator VkFramebufferCreateInfo const&() const + { + return *reinterpret_cast(this); + } + + operator VkFramebufferCreateInfo &() + { + return *reinterpret_cast(this); + } + + bool operator==( FramebufferCreateInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( renderPass == rhs.renderPass ) + && ( attachmentCount == rhs.attachmentCount ) + && ( pAttachments == rhs.pAttachments ) + && ( width == rhs.width ) + && ( height == rhs.height ) + && ( layers == rhs.layers ); + } + + bool operator!=( FramebufferCreateInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eFramebufferCreateInfo; + + public: + const void* pNext = nullptr; + FramebufferCreateFlags flags; + RenderPass renderPass; + uint32_t attachmentCount; + const ImageView* pAttachments; + uint32_t width; + uint32_t height; + uint32_t layers; + }; + static_assert( sizeof( FramebufferCreateInfo ) == sizeof( VkFramebufferCreateInfo ), "struct and wrapper have different size!" 
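+  // Usage sketch (illustrative comment only): the attachments must be ImageViews
+  // matching the render pass' attachment descriptions, and width/height/layers
+  // bound the renderable area. Assuming views points at the ImageView
+  // attachments for one swapchain image:
+  //
+  //   FramebufferCreateInfo fbci = FramebufferCreateInfo()
+  //       .setRenderPass( renderPass )
+  //       .setAttachmentCount( attachmentCount )
+  //       .setPAttachments( views )
+  //       .setWidth( width )
+  //       .setHeight( height )
+  //       .setLayers( 1 );
+  //   const VkFramebufferCreateInfo & cFbci = fbci;
+  //   VkFramebuffer framebuffer;
+  //   vkCreateFramebuffer( device, &cFbci, nullptr, &framebuffer );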
); + + struct DisplayModeCreateInfoKHR + { + DisplayModeCreateInfoKHR( DisplayModeCreateFlagsKHR flags_ = DisplayModeCreateFlagsKHR(), + DisplayModeParametersKHR parameters_ = DisplayModeParametersKHR() ) + : flags( flags_ ) + , parameters( parameters_ ) + { + } + + DisplayModeCreateInfoKHR( VkDisplayModeCreateInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( DisplayModeCreateInfoKHR ) ); + } + + DisplayModeCreateInfoKHR& operator=( VkDisplayModeCreateInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( DisplayModeCreateInfoKHR ) ); + return *this; + } + DisplayModeCreateInfoKHR& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + DisplayModeCreateInfoKHR& setFlags( DisplayModeCreateFlagsKHR flags_ ) + { + flags = flags_; + return *this; + } + + DisplayModeCreateInfoKHR& setParameters( DisplayModeParametersKHR parameters_ ) + { + parameters = parameters_; + return *this; + } + + operator VkDisplayModeCreateInfoKHR const&() const + { + return *reinterpret_cast(this); + } + + operator VkDisplayModeCreateInfoKHR &() + { + return *reinterpret_cast(this); + } + + bool operator==( DisplayModeCreateInfoKHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( parameters == rhs.parameters ); + } + + bool operator!=( DisplayModeCreateInfoKHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eDisplayModeCreateInfoKHR; + + public: + const void* pNext = nullptr; + DisplayModeCreateFlagsKHR flags; + DisplayModeParametersKHR parameters; + }; + static_assert( sizeof( DisplayModeCreateInfoKHR ) == sizeof( VkDisplayModeCreateInfoKHR ), "struct and wrapper have different size!" ); + + struct DisplayPresentInfoKHR + { + DisplayPresentInfoKHR( Rect2D srcRect_ = Rect2D(), + Rect2D dstRect_ = Rect2D(), + Bool32 persistent_ = 0 ) + : srcRect( srcRect_ ) + , dstRect( dstRect_ ) + , persistent( persistent_ ) + { + } + + DisplayPresentInfoKHR( VkDisplayPresentInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( DisplayPresentInfoKHR ) ); + } + + DisplayPresentInfoKHR& operator=( VkDisplayPresentInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( DisplayPresentInfoKHR ) ); + return *this; + } + DisplayPresentInfoKHR& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + DisplayPresentInfoKHR& setSrcRect( Rect2D srcRect_ ) + { + srcRect = srcRect_; + return *this; + } + + DisplayPresentInfoKHR& setDstRect( Rect2D dstRect_ ) + { + dstRect = dstRect_; + return *this; + } + + DisplayPresentInfoKHR& setPersistent( Bool32 persistent_ ) + { + persistent = persistent_; + return *this; + } + + operator VkDisplayPresentInfoKHR const&() const + { + return *reinterpret_cast(this); + } + + operator VkDisplayPresentInfoKHR &() + { + return *reinterpret_cast(this); + } + + bool operator==( DisplayPresentInfoKHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( srcRect == rhs.srcRect ) + && ( dstRect == rhs.dstRect ) + && ( persistent == rhs.persistent ); + } + + bool operator!=( DisplayPresentInfoKHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eDisplayPresentInfoKHR; + + public: + const void* pNext = nullptr; + Rect2D srcRect; + Rect2D dstRect; + Bool32 persistent; + }; + static_assert( sizeof( DisplayPresentInfoKHR ) == sizeof( VkDisplayPresentInfoKHR ), "struct and wrapper have different size!" 
); + +#ifdef VK_USE_PLATFORM_ANDROID_KHR + struct AndroidSurfaceCreateInfoKHR + { + AndroidSurfaceCreateInfoKHR( AndroidSurfaceCreateFlagsKHR flags_ = AndroidSurfaceCreateFlagsKHR(), + struct ANativeWindow* window_ = nullptr ) + : flags( flags_ ) + , window( window_ ) + { + } + + AndroidSurfaceCreateInfoKHR( VkAndroidSurfaceCreateInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( AndroidSurfaceCreateInfoKHR ) ); + } + + AndroidSurfaceCreateInfoKHR& operator=( VkAndroidSurfaceCreateInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( AndroidSurfaceCreateInfoKHR ) ); + return *this; + } + AndroidSurfaceCreateInfoKHR& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + AndroidSurfaceCreateInfoKHR& setFlags( AndroidSurfaceCreateFlagsKHR flags_ ) + { + flags = flags_; + return *this; + } + + AndroidSurfaceCreateInfoKHR& setWindow( struct ANativeWindow* window_ ) + { + window = window_; + return *this; + } + + operator VkAndroidSurfaceCreateInfoKHR const&() const + { + return *reinterpret_cast(this); + } + + operator VkAndroidSurfaceCreateInfoKHR &() + { + return *reinterpret_cast(this); + } + + bool operator==( AndroidSurfaceCreateInfoKHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( window == rhs.window ); + } + + bool operator!=( AndroidSurfaceCreateInfoKHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eAndroidSurfaceCreateInfoKHR; + + public: + const void* pNext = nullptr; + AndroidSurfaceCreateFlagsKHR flags; + struct ANativeWindow* window; + }; + static_assert( sizeof( AndroidSurfaceCreateInfoKHR ) == sizeof( VkAndroidSurfaceCreateInfoKHR ), "struct and wrapper have different size!" ); +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +#ifdef VK_USE_PLATFORM_VI_NN + struct ViSurfaceCreateInfoNN + { + ViSurfaceCreateInfoNN( ViSurfaceCreateFlagsNN flags_ = ViSurfaceCreateFlagsNN(), + void* window_ = nullptr ) + : flags( flags_ ) + , window( window_ ) + { + } + + ViSurfaceCreateInfoNN( VkViSurfaceCreateInfoNN const & rhs ) + { + memcpy( this, &rhs, sizeof( ViSurfaceCreateInfoNN ) ); + } + + ViSurfaceCreateInfoNN& operator=( VkViSurfaceCreateInfoNN const & rhs ) + { + memcpy( this, &rhs, sizeof( ViSurfaceCreateInfoNN ) ); + return *this; + } + ViSurfaceCreateInfoNN& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + ViSurfaceCreateInfoNN& setFlags( ViSurfaceCreateFlagsNN flags_ ) + { + flags = flags_; + return *this; + } + + ViSurfaceCreateInfoNN& setWindow( void* window_ ) + { + window = window_; + return *this; + } + + operator VkViSurfaceCreateInfoNN const&() const + { + return *reinterpret_cast(this); + } + + operator VkViSurfaceCreateInfoNN &() + { + return *reinterpret_cast(this); + } + + bool operator==( ViSurfaceCreateInfoNN const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( window == rhs.window ); + } + + bool operator!=( ViSurfaceCreateInfoNN const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eViSurfaceCreateInfoNN; + + public: + const void* pNext = nullptr; + ViSurfaceCreateFlagsNN flags; + void* window; + }; + static_assert( sizeof( ViSurfaceCreateInfoNN ) == sizeof( VkViSurfaceCreateInfoNN ), "struct and wrapper have different size!" 
); +#endif /*VK_USE_PLATFORM_VI_NN*/ + +#ifdef VK_USE_PLATFORM_WAYLAND_KHR + struct WaylandSurfaceCreateInfoKHR + { + WaylandSurfaceCreateInfoKHR( WaylandSurfaceCreateFlagsKHR flags_ = WaylandSurfaceCreateFlagsKHR(), + struct wl_display* display_ = nullptr, + struct wl_surface* surface_ = nullptr ) + : flags( flags_ ) + , display( display_ ) + , surface( surface_ ) + { + } + + WaylandSurfaceCreateInfoKHR( VkWaylandSurfaceCreateInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( WaylandSurfaceCreateInfoKHR ) ); + } + + WaylandSurfaceCreateInfoKHR& operator=( VkWaylandSurfaceCreateInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( WaylandSurfaceCreateInfoKHR ) ); + return *this; + } + WaylandSurfaceCreateInfoKHR& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + WaylandSurfaceCreateInfoKHR& setFlags( WaylandSurfaceCreateFlagsKHR flags_ ) + { + flags = flags_; + return *this; + } + + WaylandSurfaceCreateInfoKHR& setDisplay( struct wl_display* display_ ) + { + display = display_; + return *this; + } + + WaylandSurfaceCreateInfoKHR& setSurface( struct wl_surface* surface_ ) + { + surface = surface_; + return *this; + } + + operator VkWaylandSurfaceCreateInfoKHR const&() const + { + return *reinterpret_cast(this); + } + + operator VkWaylandSurfaceCreateInfoKHR &() + { + return *reinterpret_cast(this); + } + + bool operator==( WaylandSurfaceCreateInfoKHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( display == rhs.display ) + && ( surface == rhs.surface ); + } + + bool operator!=( WaylandSurfaceCreateInfoKHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eWaylandSurfaceCreateInfoKHR; + + public: + const void* pNext = nullptr; + WaylandSurfaceCreateFlagsKHR flags; + struct wl_display* display; + struct wl_surface* surface; + }; + static_assert( sizeof( WaylandSurfaceCreateInfoKHR ) == sizeof( VkWaylandSurfaceCreateInfoKHR ), "struct and wrapper have different size!" 
); +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + +#ifdef VK_USE_PLATFORM_WIN32_KHR + struct Win32SurfaceCreateInfoKHR + { + Win32SurfaceCreateInfoKHR( Win32SurfaceCreateFlagsKHR flags_ = Win32SurfaceCreateFlagsKHR(), + HINSTANCE hinstance_ = 0, + HWND hwnd_ = 0 ) + : flags( flags_ ) + , hinstance( hinstance_ ) + , hwnd( hwnd_ ) + { + } + + Win32SurfaceCreateInfoKHR( VkWin32SurfaceCreateInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( Win32SurfaceCreateInfoKHR ) ); + } + + Win32SurfaceCreateInfoKHR& operator=( VkWin32SurfaceCreateInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( Win32SurfaceCreateInfoKHR ) ); + return *this; + } + Win32SurfaceCreateInfoKHR& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + Win32SurfaceCreateInfoKHR& setFlags( Win32SurfaceCreateFlagsKHR flags_ ) + { + flags = flags_; + return *this; + } + + Win32SurfaceCreateInfoKHR& setHinstance( HINSTANCE hinstance_ ) + { + hinstance = hinstance_; + return *this; + } + + Win32SurfaceCreateInfoKHR& setHwnd( HWND hwnd_ ) + { + hwnd = hwnd_; + return *this; + } + + operator VkWin32SurfaceCreateInfoKHR const&() const + { + return *reinterpret_cast(this); + } + + operator VkWin32SurfaceCreateInfoKHR &() + { + return *reinterpret_cast(this); + } + + bool operator==( Win32SurfaceCreateInfoKHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( hinstance == rhs.hinstance ) + && ( hwnd == rhs.hwnd ); + } + + bool operator!=( Win32SurfaceCreateInfoKHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eWin32SurfaceCreateInfoKHR; + + public: + const void* pNext = nullptr; + Win32SurfaceCreateFlagsKHR flags; + HINSTANCE hinstance; + HWND hwnd; + }; + static_assert( sizeof( Win32SurfaceCreateInfoKHR ) == sizeof( VkWin32SurfaceCreateInfoKHR ), "struct and wrapper have different size!" 
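+  // Illustrative use of the fluent setters and of the implicit conversion back to the
+  // C struct (assuming this header's default vk namespace; instance, hInstance, hWnd
+  // and surface below are placeholders):
+  //   vk::Win32SurfaceCreateInfoKHR surfaceInfo = vk::Win32SurfaceCreateInfoKHR()
+  //                                                 .setHinstance( hInstance )
+  //                                                 .setHwnd( hWnd );
+  //   VkWin32SurfaceCreateInfoKHR const & cInfo = surfaceInfo;   // via operator VkWin32SurfaceCreateInfoKHR const&
+  //   vkCreateWin32SurfaceKHR( instance, &cInfo, nullptr, &surface );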
); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#ifdef VK_USE_PLATFORM_XLIB_KHR + struct XlibSurfaceCreateInfoKHR + { + XlibSurfaceCreateInfoKHR( XlibSurfaceCreateFlagsKHR flags_ = XlibSurfaceCreateFlagsKHR(), + Display* dpy_ = nullptr, + Window window_ = 0 ) + : flags( flags_ ) + , dpy( dpy_ ) + , window( window_ ) + { + } + + XlibSurfaceCreateInfoKHR( VkXlibSurfaceCreateInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( XlibSurfaceCreateInfoKHR ) ); + } + + XlibSurfaceCreateInfoKHR& operator=( VkXlibSurfaceCreateInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( XlibSurfaceCreateInfoKHR ) ); + return *this; + } + XlibSurfaceCreateInfoKHR& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + XlibSurfaceCreateInfoKHR& setFlags( XlibSurfaceCreateFlagsKHR flags_ ) + { + flags = flags_; + return *this; + } + + XlibSurfaceCreateInfoKHR& setDpy( Display* dpy_ ) + { + dpy = dpy_; + return *this; + } + + XlibSurfaceCreateInfoKHR& setWindow( Window window_ ) + { + window = window_; + return *this; + } + + operator VkXlibSurfaceCreateInfoKHR const&() const + { + return *reinterpret_cast(this); + } + + operator VkXlibSurfaceCreateInfoKHR &() + { + return *reinterpret_cast(this); + } + + bool operator==( XlibSurfaceCreateInfoKHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( dpy == rhs.dpy ) + && ( window == rhs.window ); + } + + bool operator!=( XlibSurfaceCreateInfoKHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eXlibSurfaceCreateInfoKHR; + + public: + const void* pNext = nullptr; + XlibSurfaceCreateFlagsKHR flags; + Display* dpy; + Window window; + }; + static_assert( sizeof( XlibSurfaceCreateInfoKHR ) == sizeof( VkXlibSurfaceCreateInfoKHR ), "struct and wrapper have different size!" 
); +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ + +#ifdef VK_USE_PLATFORM_XCB_KHR + struct XcbSurfaceCreateInfoKHR + { + XcbSurfaceCreateInfoKHR( XcbSurfaceCreateFlagsKHR flags_ = XcbSurfaceCreateFlagsKHR(), + xcb_connection_t* connection_ = nullptr, + xcb_window_t window_ = 0 ) + : flags( flags_ ) + , connection( connection_ ) + , window( window_ ) + { + } + + XcbSurfaceCreateInfoKHR( VkXcbSurfaceCreateInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( XcbSurfaceCreateInfoKHR ) ); + } + + XcbSurfaceCreateInfoKHR& operator=( VkXcbSurfaceCreateInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( XcbSurfaceCreateInfoKHR ) ); + return *this; + } + XcbSurfaceCreateInfoKHR& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + XcbSurfaceCreateInfoKHR& setFlags( XcbSurfaceCreateFlagsKHR flags_ ) + { + flags = flags_; + return *this; + } + + XcbSurfaceCreateInfoKHR& setConnection( xcb_connection_t* connection_ ) + { + connection = connection_; + return *this; + } + + XcbSurfaceCreateInfoKHR& setWindow( xcb_window_t window_ ) + { + window = window_; + return *this; + } + + operator VkXcbSurfaceCreateInfoKHR const&() const + { + return *reinterpret_cast(this); + } + + operator VkXcbSurfaceCreateInfoKHR &() + { + return *reinterpret_cast(this); + } + + bool operator==( XcbSurfaceCreateInfoKHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( connection == rhs.connection ) + && ( window == rhs.window ); + } + + bool operator!=( XcbSurfaceCreateInfoKHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eXcbSurfaceCreateInfoKHR; + + public: + const void* pNext = nullptr; + XcbSurfaceCreateFlagsKHR flags; + xcb_connection_t* connection; + xcb_window_t window; + }; + static_assert( sizeof( XcbSurfaceCreateInfoKHR ) == sizeof( VkXcbSurfaceCreateInfoKHR ), "struct and wrapper have different size!" 
); +#endif /*VK_USE_PLATFORM_XCB_KHR*/ + +#ifdef VK_USE_PLATFORM_FUCHSIA_FUCHSIA + struct ImagePipeSurfaceCreateInfoFUCHSIA + { + ImagePipeSurfaceCreateInfoFUCHSIA( ImagePipeSurfaceCreateFlagsFUCHSIA flags_ = ImagePipeSurfaceCreateFlagsFUCHSIA(), + zx_handle_t imagePipeHandle_ = 0 ) + : flags( flags_ ) + , imagePipeHandle( imagePipeHandle_ ) + { + } + + ImagePipeSurfaceCreateInfoFUCHSIA( VkImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) + { + memcpy( this, &rhs, sizeof( ImagePipeSurfaceCreateInfoFUCHSIA ) ); + } + + ImagePipeSurfaceCreateInfoFUCHSIA& operator=( VkImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) + { + memcpy( this, &rhs, sizeof( ImagePipeSurfaceCreateInfoFUCHSIA ) ); + return *this; + } + ImagePipeSurfaceCreateInfoFUCHSIA& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + ImagePipeSurfaceCreateInfoFUCHSIA& setFlags( ImagePipeSurfaceCreateFlagsFUCHSIA flags_ ) + { + flags = flags_; + return *this; + } + + ImagePipeSurfaceCreateInfoFUCHSIA& setImagePipeHandle( zx_handle_t imagePipeHandle_ ) + { + imagePipeHandle = imagePipeHandle_; + return *this; + } + + operator VkImagePipeSurfaceCreateInfoFUCHSIA const&() const + { + return *reinterpret_cast(this); + } + + operator VkImagePipeSurfaceCreateInfoFUCHSIA &() + { + return *reinterpret_cast(this); + } + + bool operator==( ImagePipeSurfaceCreateInfoFUCHSIA const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( imagePipeHandle == rhs.imagePipeHandle ); + } + + bool operator!=( ImagePipeSurfaceCreateInfoFUCHSIA const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eImagepipeSurfaceCreateInfoFUCHSIA; + + public: + const void* pNext = nullptr; + ImagePipeSurfaceCreateFlagsFUCHSIA flags; + zx_handle_t imagePipeHandle; + }; + static_assert( sizeof( ImagePipeSurfaceCreateInfoFUCHSIA ) == sizeof( VkImagePipeSurfaceCreateInfoFUCHSIA ), "struct and wrapper have different size!" 
); +#endif /*VK_USE_PLATFORM_FUCHSIA_FUCHSIA*/ + + struct DebugMarkerMarkerInfoEXT + { + DebugMarkerMarkerInfoEXT( const char* pMarkerName_ = nullptr, + std::array const& color_ = { { 0, 0, 0, 0 } } ) + : pMarkerName( pMarkerName_ ) + { + memcpy( &color, color_.data(), 4 * sizeof( float ) ); + } + + DebugMarkerMarkerInfoEXT( VkDebugMarkerMarkerInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( DebugMarkerMarkerInfoEXT ) ); + } + + DebugMarkerMarkerInfoEXT& operator=( VkDebugMarkerMarkerInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( DebugMarkerMarkerInfoEXT ) ); + return *this; + } + DebugMarkerMarkerInfoEXT& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + DebugMarkerMarkerInfoEXT& setPMarkerName( const char* pMarkerName_ ) + { + pMarkerName = pMarkerName_; + return *this; + } + + DebugMarkerMarkerInfoEXT& setColor( std::array color_ ) + { + memcpy( &color, color_.data(), 4 * sizeof( float ) ); + return *this; + } + + operator VkDebugMarkerMarkerInfoEXT const&() const + { + return *reinterpret_cast(this); + } + + operator VkDebugMarkerMarkerInfoEXT &() + { + return *reinterpret_cast(this); + } + + bool operator==( DebugMarkerMarkerInfoEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( pMarkerName == rhs.pMarkerName ) + && ( memcmp( color, rhs.color, 4 * sizeof( float ) ) == 0 ); + } + + bool operator!=( DebugMarkerMarkerInfoEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eDebugMarkerMarkerInfoEXT; + + public: + const void* pNext = nullptr; + const char* pMarkerName; + float color[4]; + }; + static_assert( sizeof( DebugMarkerMarkerInfoEXT ) == sizeof( VkDebugMarkerMarkerInfoEXT ), "struct and wrapper have different size!" ); + + struct DedicatedAllocationImageCreateInfoNV + { + DedicatedAllocationImageCreateInfoNV( Bool32 dedicatedAllocation_ = 0 ) + : dedicatedAllocation( dedicatedAllocation_ ) + { + } + + DedicatedAllocationImageCreateInfoNV( VkDedicatedAllocationImageCreateInfoNV const & rhs ) + { + memcpy( this, &rhs, sizeof( DedicatedAllocationImageCreateInfoNV ) ); + } + + DedicatedAllocationImageCreateInfoNV& operator=( VkDedicatedAllocationImageCreateInfoNV const & rhs ) + { + memcpy( this, &rhs, sizeof( DedicatedAllocationImageCreateInfoNV ) ); + return *this; + } + DedicatedAllocationImageCreateInfoNV& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + DedicatedAllocationImageCreateInfoNV& setDedicatedAllocation( Bool32 dedicatedAllocation_ ) + { + dedicatedAllocation = dedicatedAllocation_; + return *this; + } + + operator VkDedicatedAllocationImageCreateInfoNV const&() const + { + return *reinterpret_cast(this); + } + + operator VkDedicatedAllocationImageCreateInfoNV &() + { + return *reinterpret_cast(this); + } + + bool operator==( DedicatedAllocationImageCreateInfoNV const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( dedicatedAllocation == rhs.dedicatedAllocation ); + } + + bool operator!=( DedicatedAllocationImageCreateInfoNV const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eDedicatedAllocationImageCreateInfoNV; + + public: + const void* pNext = nullptr; + Bool32 dedicatedAllocation; + }; + static_assert( sizeof( DedicatedAllocationImageCreateInfoNV ) == sizeof( VkDedicatedAllocationImageCreateInfoNV ), "struct and wrapper have different size!" 
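+  // Illustrative pNext chaining for VK_NV_dedicated_allocation; ImageCreateInfo is
+  // defined elsewhere in this header and imageInfo is a placeholder variable:
+  //   vk::DedicatedAllocationImageCreateInfoNV dedicatedImageInfo( VK_TRUE );
+  //   vk::ImageCreateInfo imageInfo;
+  //   imageInfo.setPNext( &dedicatedImageInfo );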
); + + struct DedicatedAllocationBufferCreateInfoNV + { + DedicatedAllocationBufferCreateInfoNV( Bool32 dedicatedAllocation_ = 0 ) + : dedicatedAllocation( dedicatedAllocation_ ) + { + } + + DedicatedAllocationBufferCreateInfoNV( VkDedicatedAllocationBufferCreateInfoNV const & rhs ) + { + memcpy( this, &rhs, sizeof( DedicatedAllocationBufferCreateInfoNV ) ); + } + + DedicatedAllocationBufferCreateInfoNV& operator=( VkDedicatedAllocationBufferCreateInfoNV const & rhs ) + { + memcpy( this, &rhs, sizeof( DedicatedAllocationBufferCreateInfoNV ) ); + return *this; + } + DedicatedAllocationBufferCreateInfoNV& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + DedicatedAllocationBufferCreateInfoNV& setDedicatedAllocation( Bool32 dedicatedAllocation_ ) + { + dedicatedAllocation = dedicatedAllocation_; + return *this; + } + + operator VkDedicatedAllocationBufferCreateInfoNV const&() const + { + return *reinterpret_cast(this); + } + + operator VkDedicatedAllocationBufferCreateInfoNV &() + { + return *reinterpret_cast(this); + } + + bool operator==( DedicatedAllocationBufferCreateInfoNV const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( dedicatedAllocation == rhs.dedicatedAllocation ); + } + + bool operator!=( DedicatedAllocationBufferCreateInfoNV const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eDedicatedAllocationBufferCreateInfoNV; + + public: + const void* pNext = nullptr; + Bool32 dedicatedAllocation; + }; + static_assert( sizeof( DedicatedAllocationBufferCreateInfoNV ) == sizeof( VkDedicatedAllocationBufferCreateInfoNV ), "struct and wrapper have different size!" ); + + struct DedicatedAllocationMemoryAllocateInfoNV + { + DedicatedAllocationMemoryAllocateInfoNV( Image image_ = Image(), + Buffer buffer_ = Buffer() ) + : image( image_ ) + , buffer( buffer_ ) + { + } + + DedicatedAllocationMemoryAllocateInfoNV( VkDedicatedAllocationMemoryAllocateInfoNV const & rhs ) + { + memcpy( this, &rhs, sizeof( DedicatedAllocationMemoryAllocateInfoNV ) ); + } + + DedicatedAllocationMemoryAllocateInfoNV& operator=( VkDedicatedAllocationMemoryAllocateInfoNV const & rhs ) + { + memcpy( this, &rhs, sizeof( DedicatedAllocationMemoryAllocateInfoNV ) ); + return *this; + } + DedicatedAllocationMemoryAllocateInfoNV& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + DedicatedAllocationMemoryAllocateInfoNV& setImage( Image image_ ) + { + image = image_; + return *this; + } + + DedicatedAllocationMemoryAllocateInfoNV& setBuffer( Buffer buffer_ ) + { + buffer = buffer_; + return *this; + } + + operator VkDedicatedAllocationMemoryAllocateInfoNV const&() const + { + return *reinterpret_cast(this); + } + + operator VkDedicatedAllocationMemoryAllocateInfoNV &() + { + return *reinterpret_cast(this); + } + + bool operator==( DedicatedAllocationMemoryAllocateInfoNV const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( image == rhs.image ) + && ( buffer == rhs.buffer ); + } + + bool operator!=( DedicatedAllocationMemoryAllocateInfoNV const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eDedicatedAllocationMemoryAllocateInfoNV; + + public: + const void* pNext = nullptr; + Image image; + Buffer buffer; + }; + static_assert( sizeof( DedicatedAllocationMemoryAllocateInfoNV ) == sizeof( VkDedicatedAllocationMemoryAllocateInfoNV ), "struct and wrapper have different size!" 
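+  // Illustrative sketch of the matching allocation side: the dedicated allocation is
+  // tied to a specific image (or buffer) through the pNext chain of MemoryAllocateInfo,
+  // which is defined elsewhere in this header; image, allocationSize and memoryTypeIndex
+  // are placeholders:
+  //   vk::DedicatedAllocationMemoryAllocateInfoNV dedicatedAllocInfo;
+  //   dedicatedAllocInfo.setImage( image );
+  //   vk::MemoryAllocateInfo allocInfo( allocationSize, memoryTypeIndex );
+  //   allocInfo.setPNext( &dedicatedAllocInfo );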
); + +#ifdef VK_USE_PLATFORM_WIN32_NV + struct ExportMemoryWin32HandleInfoNV + { + ExportMemoryWin32HandleInfoNV( const SECURITY_ATTRIBUTES* pAttributes_ = nullptr, + DWORD dwAccess_ = 0 ) + : pAttributes( pAttributes_ ) + , dwAccess( dwAccess_ ) + { + } + + ExportMemoryWin32HandleInfoNV( VkExportMemoryWin32HandleInfoNV const & rhs ) + { + memcpy( this, &rhs, sizeof( ExportMemoryWin32HandleInfoNV ) ); + } + + ExportMemoryWin32HandleInfoNV& operator=( VkExportMemoryWin32HandleInfoNV const & rhs ) + { + memcpy( this, &rhs, sizeof( ExportMemoryWin32HandleInfoNV ) ); + return *this; + } + ExportMemoryWin32HandleInfoNV& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + ExportMemoryWin32HandleInfoNV& setPAttributes( const SECURITY_ATTRIBUTES* pAttributes_ ) + { + pAttributes = pAttributes_; + return *this; + } + + ExportMemoryWin32HandleInfoNV& setDwAccess( DWORD dwAccess_ ) + { + dwAccess = dwAccess_; + return *this; + } + + operator VkExportMemoryWin32HandleInfoNV const&() const + { + return *reinterpret_cast(this); + } + + operator VkExportMemoryWin32HandleInfoNV &() + { + return *reinterpret_cast(this); + } + + bool operator==( ExportMemoryWin32HandleInfoNV const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( pAttributes == rhs.pAttributes ) + && ( dwAccess == rhs.dwAccess ); + } + + bool operator!=( ExportMemoryWin32HandleInfoNV const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eExportMemoryWin32HandleInfoNV; + + public: + const void* pNext = nullptr; + const SECURITY_ATTRIBUTES* pAttributes; + DWORD dwAccess; + }; + static_assert( sizeof( ExportMemoryWin32HandleInfoNV ) == sizeof( VkExportMemoryWin32HandleInfoNV ), "struct and wrapper have different size!" 
); +#endif /*VK_USE_PLATFORM_WIN32_NV*/ + +#ifdef VK_USE_PLATFORM_WIN32_NV + struct Win32KeyedMutexAcquireReleaseInfoNV + { + Win32KeyedMutexAcquireReleaseInfoNV( uint32_t acquireCount_ = 0, + const DeviceMemory* pAcquireSyncs_ = nullptr, + const uint64_t* pAcquireKeys_ = nullptr, + const uint32_t* pAcquireTimeoutMilliseconds_ = nullptr, + uint32_t releaseCount_ = 0, + const DeviceMemory* pReleaseSyncs_ = nullptr, + const uint64_t* pReleaseKeys_ = nullptr ) + : acquireCount( acquireCount_ ) + , pAcquireSyncs( pAcquireSyncs_ ) + , pAcquireKeys( pAcquireKeys_ ) + , pAcquireTimeoutMilliseconds( pAcquireTimeoutMilliseconds_ ) + , releaseCount( releaseCount_ ) + , pReleaseSyncs( pReleaseSyncs_ ) + , pReleaseKeys( pReleaseKeys_ ) + { + } + + Win32KeyedMutexAcquireReleaseInfoNV( VkWin32KeyedMutexAcquireReleaseInfoNV const & rhs ) + { + memcpy( this, &rhs, sizeof( Win32KeyedMutexAcquireReleaseInfoNV ) ); + } + + Win32KeyedMutexAcquireReleaseInfoNV& operator=( VkWin32KeyedMutexAcquireReleaseInfoNV const & rhs ) + { + memcpy( this, &rhs, sizeof( Win32KeyedMutexAcquireReleaseInfoNV ) ); + return *this; + } + Win32KeyedMutexAcquireReleaseInfoNV& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + Win32KeyedMutexAcquireReleaseInfoNV& setAcquireCount( uint32_t acquireCount_ ) + { + acquireCount = acquireCount_; + return *this; + } + + Win32KeyedMutexAcquireReleaseInfoNV& setPAcquireSyncs( const DeviceMemory* pAcquireSyncs_ ) + { + pAcquireSyncs = pAcquireSyncs_; + return *this; + } + + Win32KeyedMutexAcquireReleaseInfoNV& setPAcquireKeys( const uint64_t* pAcquireKeys_ ) + { + pAcquireKeys = pAcquireKeys_; + return *this; + } + + Win32KeyedMutexAcquireReleaseInfoNV& setPAcquireTimeoutMilliseconds( const uint32_t* pAcquireTimeoutMilliseconds_ ) + { + pAcquireTimeoutMilliseconds = pAcquireTimeoutMilliseconds_; + return *this; + } + + Win32KeyedMutexAcquireReleaseInfoNV& setReleaseCount( uint32_t releaseCount_ ) + { + releaseCount = releaseCount_; + return *this; + } + + Win32KeyedMutexAcquireReleaseInfoNV& setPReleaseSyncs( const DeviceMemory* pReleaseSyncs_ ) + { + pReleaseSyncs = pReleaseSyncs_; + return *this; + } + + Win32KeyedMutexAcquireReleaseInfoNV& setPReleaseKeys( const uint64_t* pReleaseKeys_ ) + { + pReleaseKeys = pReleaseKeys_; + return *this; + } + + operator VkWin32KeyedMutexAcquireReleaseInfoNV const&() const + { + return *reinterpret_cast(this); + } + + operator VkWin32KeyedMutexAcquireReleaseInfoNV &() + { + return *reinterpret_cast(this); + } + + bool operator==( Win32KeyedMutexAcquireReleaseInfoNV const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( acquireCount == rhs.acquireCount ) + && ( pAcquireSyncs == rhs.pAcquireSyncs ) + && ( pAcquireKeys == rhs.pAcquireKeys ) + && ( pAcquireTimeoutMilliseconds == rhs.pAcquireTimeoutMilliseconds ) + && ( releaseCount == rhs.releaseCount ) + && ( pReleaseSyncs == rhs.pReleaseSyncs ) + && ( pReleaseKeys == rhs.pReleaseKeys ); + } + + bool operator!=( Win32KeyedMutexAcquireReleaseInfoNV const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eWin32KeyedMutexAcquireReleaseInfoNV; + + public: + const void* pNext = nullptr; + uint32_t acquireCount; + const DeviceMemory* pAcquireSyncs; + const uint64_t* pAcquireKeys; + const uint32_t* pAcquireTimeoutMilliseconds; + uint32_t releaseCount; + const DeviceMemory* pReleaseSyncs; + const uint64_t* pReleaseKeys; + }; + static_assert( sizeof( Win32KeyedMutexAcquireReleaseInfoNV ) == sizeof( 
VkWin32KeyedMutexAcquireReleaseInfoNV ), "struct and wrapper have different size!" ); +#endif /*VK_USE_PLATFORM_WIN32_NV*/ + + struct DeviceGeneratedCommandsFeaturesNVX + { + DeviceGeneratedCommandsFeaturesNVX( Bool32 computeBindingPointSupport_ = 0 ) + : computeBindingPointSupport( computeBindingPointSupport_ ) + { + } + + DeviceGeneratedCommandsFeaturesNVX( VkDeviceGeneratedCommandsFeaturesNVX const & rhs ) + { + memcpy( this, &rhs, sizeof( DeviceGeneratedCommandsFeaturesNVX ) ); + } + + DeviceGeneratedCommandsFeaturesNVX& operator=( VkDeviceGeneratedCommandsFeaturesNVX const & rhs ) + { + memcpy( this, &rhs, sizeof( DeviceGeneratedCommandsFeaturesNVX ) ); + return *this; + } + DeviceGeneratedCommandsFeaturesNVX& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + DeviceGeneratedCommandsFeaturesNVX& setComputeBindingPointSupport( Bool32 computeBindingPointSupport_ ) + { + computeBindingPointSupport = computeBindingPointSupport_; + return *this; + } + + operator VkDeviceGeneratedCommandsFeaturesNVX const&() const + { + return *reinterpret_cast(this); + } + + operator VkDeviceGeneratedCommandsFeaturesNVX &() + { + return *reinterpret_cast(this); + } + + bool operator==( DeviceGeneratedCommandsFeaturesNVX const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( computeBindingPointSupport == rhs.computeBindingPointSupport ); + } + + bool operator!=( DeviceGeneratedCommandsFeaturesNVX const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eDeviceGeneratedCommandsFeaturesNVX; + + public: + const void* pNext = nullptr; + Bool32 computeBindingPointSupport; + }; + static_assert( sizeof( DeviceGeneratedCommandsFeaturesNVX ) == sizeof( VkDeviceGeneratedCommandsFeaturesNVX ), "struct and wrapper have different size!" 
); + + struct DeviceGeneratedCommandsLimitsNVX + { + DeviceGeneratedCommandsLimitsNVX( uint32_t maxIndirectCommandsLayoutTokenCount_ = 0, + uint32_t maxObjectEntryCounts_ = 0, + uint32_t minSequenceCountBufferOffsetAlignment_ = 0, + uint32_t minSequenceIndexBufferOffsetAlignment_ = 0, + uint32_t minCommandsTokenBufferOffsetAlignment_ = 0 ) + : maxIndirectCommandsLayoutTokenCount( maxIndirectCommandsLayoutTokenCount_ ) + , maxObjectEntryCounts( maxObjectEntryCounts_ ) + , minSequenceCountBufferOffsetAlignment( minSequenceCountBufferOffsetAlignment_ ) + , minSequenceIndexBufferOffsetAlignment( minSequenceIndexBufferOffsetAlignment_ ) + , minCommandsTokenBufferOffsetAlignment( minCommandsTokenBufferOffsetAlignment_ ) + { + } + + DeviceGeneratedCommandsLimitsNVX( VkDeviceGeneratedCommandsLimitsNVX const & rhs ) + { + memcpy( this, &rhs, sizeof( DeviceGeneratedCommandsLimitsNVX ) ); + } + + DeviceGeneratedCommandsLimitsNVX& operator=( VkDeviceGeneratedCommandsLimitsNVX const & rhs ) + { + memcpy( this, &rhs, sizeof( DeviceGeneratedCommandsLimitsNVX ) ); + return *this; + } + DeviceGeneratedCommandsLimitsNVX& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + DeviceGeneratedCommandsLimitsNVX& setMaxIndirectCommandsLayoutTokenCount( uint32_t maxIndirectCommandsLayoutTokenCount_ ) + { + maxIndirectCommandsLayoutTokenCount = maxIndirectCommandsLayoutTokenCount_; + return *this; + } + + DeviceGeneratedCommandsLimitsNVX& setMaxObjectEntryCounts( uint32_t maxObjectEntryCounts_ ) + { + maxObjectEntryCounts = maxObjectEntryCounts_; + return *this; + } + + DeviceGeneratedCommandsLimitsNVX& setMinSequenceCountBufferOffsetAlignment( uint32_t minSequenceCountBufferOffsetAlignment_ ) + { + minSequenceCountBufferOffsetAlignment = minSequenceCountBufferOffsetAlignment_; + return *this; + } + + DeviceGeneratedCommandsLimitsNVX& setMinSequenceIndexBufferOffsetAlignment( uint32_t minSequenceIndexBufferOffsetAlignment_ ) + { + minSequenceIndexBufferOffsetAlignment = minSequenceIndexBufferOffsetAlignment_; + return *this; + } + + DeviceGeneratedCommandsLimitsNVX& setMinCommandsTokenBufferOffsetAlignment( uint32_t minCommandsTokenBufferOffsetAlignment_ ) + { + minCommandsTokenBufferOffsetAlignment = minCommandsTokenBufferOffsetAlignment_; + return *this; + } + + operator VkDeviceGeneratedCommandsLimitsNVX const&() const + { + return *reinterpret_cast(this); + } + + operator VkDeviceGeneratedCommandsLimitsNVX &() + { + return *reinterpret_cast(this); + } + + bool operator==( DeviceGeneratedCommandsLimitsNVX const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( maxIndirectCommandsLayoutTokenCount == rhs.maxIndirectCommandsLayoutTokenCount ) + && ( maxObjectEntryCounts == rhs.maxObjectEntryCounts ) + && ( minSequenceCountBufferOffsetAlignment == rhs.minSequenceCountBufferOffsetAlignment ) + && ( minSequenceIndexBufferOffsetAlignment == rhs.minSequenceIndexBufferOffsetAlignment ) + && ( minCommandsTokenBufferOffsetAlignment == rhs.minCommandsTokenBufferOffsetAlignment ); + } + + bool operator!=( DeviceGeneratedCommandsLimitsNVX const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eDeviceGeneratedCommandsLimitsNVX; + + public: + const void* pNext = nullptr; + uint32_t maxIndirectCommandsLayoutTokenCount; + uint32_t maxObjectEntryCounts; + uint32_t minSequenceCountBufferOffsetAlignment; + uint32_t minSequenceIndexBufferOffsetAlignment; + uint32_t minCommandsTokenBufferOffsetAlignment; + }; + static_assert( 
sizeof( DeviceGeneratedCommandsLimitsNVX ) == sizeof( VkDeviceGeneratedCommandsLimitsNVX ), "struct and wrapper have different size!" ); + + struct CmdReserveSpaceForCommandsInfoNVX + { + CmdReserveSpaceForCommandsInfoNVX( ObjectTableNVX objectTable_ = ObjectTableNVX(), + IndirectCommandsLayoutNVX indirectCommandsLayout_ = IndirectCommandsLayoutNVX(), + uint32_t maxSequencesCount_ = 0 ) + : objectTable( objectTable_ ) + , indirectCommandsLayout( indirectCommandsLayout_ ) + , maxSequencesCount( maxSequencesCount_ ) + { + } + + CmdReserveSpaceForCommandsInfoNVX( VkCmdReserveSpaceForCommandsInfoNVX const & rhs ) + { + memcpy( this, &rhs, sizeof( CmdReserveSpaceForCommandsInfoNVX ) ); + } + + CmdReserveSpaceForCommandsInfoNVX& operator=( VkCmdReserveSpaceForCommandsInfoNVX const & rhs ) + { + memcpy( this, &rhs, sizeof( CmdReserveSpaceForCommandsInfoNVX ) ); + return *this; + } + CmdReserveSpaceForCommandsInfoNVX& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + CmdReserveSpaceForCommandsInfoNVX& setObjectTable( ObjectTableNVX objectTable_ ) + { + objectTable = objectTable_; + return *this; + } + + CmdReserveSpaceForCommandsInfoNVX& setIndirectCommandsLayout( IndirectCommandsLayoutNVX indirectCommandsLayout_ ) + { + indirectCommandsLayout = indirectCommandsLayout_; + return *this; + } + + CmdReserveSpaceForCommandsInfoNVX& setMaxSequencesCount( uint32_t maxSequencesCount_ ) + { + maxSequencesCount = maxSequencesCount_; + return *this; + } + + operator VkCmdReserveSpaceForCommandsInfoNVX const&() const + { + return *reinterpret_cast(this); + } + + operator VkCmdReserveSpaceForCommandsInfoNVX &() + { + return *reinterpret_cast(this); + } + + bool operator==( CmdReserveSpaceForCommandsInfoNVX const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( objectTable == rhs.objectTable ) + && ( indirectCommandsLayout == rhs.indirectCommandsLayout ) + && ( maxSequencesCount == rhs.maxSequencesCount ); + } + + bool operator!=( CmdReserveSpaceForCommandsInfoNVX const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eCmdReserveSpaceForCommandsInfoNVX; + + public: + const void* pNext = nullptr; + ObjectTableNVX objectTable; + IndirectCommandsLayoutNVX indirectCommandsLayout; + uint32_t maxSequencesCount; + }; + static_assert( sizeof( CmdReserveSpaceForCommandsInfoNVX ) == sizeof( VkCmdReserveSpaceForCommandsInfoNVX ), "struct and wrapper have different size!" 
); + + struct PhysicalDeviceFeatures2 + { + PhysicalDeviceFeatures2( PhysicalDeviceFeatures features_ = PhysicalDeviceFeatures() ) + : features( features_ ) + { + } + + PhysicalDeviceFeatures2( VkPhysicalDeviceFeatures2 const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceFeatures2 ) ); + } + + PhysicalDeviceFeatures2& operator=( VkPhysicalDeviceFeatures2 const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceFeatures2 ) ); + return *this; + } + PhysicalDeviceFeatures2& setPNext( void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceFeatures2& setFeatures( PhysicalDeviceFeatures features_ ) + { + features = features_; + return *this; + } + + operator VkPhysicalDeviceFeatures2 const&() const + { + return *reinterpret_cast(this); + } + + operator VkPhysicalDeviceFeatures2 &() + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDeviceFeatures2 const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( features == rhs.features ); + } + + bool operator!=( PhysicalDeviceFeatures2 const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePhysicalDeviceFeatures2; + + public: + void* pNext = nullptr; + PhysicalDeviceFeatures features; + }; + static_assert( sizeof( PhysicalDeviceFeatures2 ) == sizeof( VkPhysicalDeviceFeatures2 ), "struct and wrapper have different size!" ); + + using PhysicalDeviceFeatures2KHR = PhysicalDeviceFeatures2; + + struct PhysicalDevicePushDescriptorPropertiesKHR + { + PhysicalDevicePushDescriptorPropertiesKHR( uint32_t maxPushDescriptors_ = 0 ) + : maxPushDescriptors( maxPushDescriptors_ ) + { + } + + PhysicalDevicePushDescriptorPropertiesKHR( VkPhysicalDevicePushDescriptorPropertiesKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDevicePushDescriptorPropertiesKHR ) ); + } + + PhysicalDevicePushDescriptorPropertiesKHR& operator=( VkPhysicalDevicePushDescriptorPropertiesKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDevicePushDescriptorPropertiesKHR ) ); + return *this; + } + PhysicalDevicePushDescriptorPropertiesKHR& setPNext( void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PhysicalDevicePushDescriptorPropertiesKHR& setMaxPushDescriptors( uint32_t maxPushDescriptors_ ) + { + maxPushDescriptors = maxPushDescriptors_; + return *this; + } + + operator VkPhysicalDevicePushDescriptorPropertiesKHR const&() const + { + return *reinterpret_cast(this); + } + + operator VkPhysicalDevicePushDescriptorPropertiesKHR &() + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDevicePushDescriptorPropertiesKHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( maxPushDescriptors == rhs.maxPushDescriptors ); + } + + bool operator!=( PhysicalDevicePushDescriptorPropertiesKHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePhysicalDevicePushDescriptorPropertiesKHR; + + public: + void* pNext = nullptr; + uint32_t maxPushDescriptors; + }; + static_assert( sizeof( PhysicalDevicePushDescriptorPropertiesKHR ) == sizeof( VkPhysicalDevicePushDescriptorPropertiesKHR ), "struct and wrapper have different size!" 
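+  // Illustrative query through the pNext chain, assuming the
+  // VK_KHR_get_physical_device_properties2 entry point is available;
+  // PhysicalDeviceProperties2 is defined elsewhere in this header and
+  // physicalDevice is a placeholder:
+  //   vk::PhysicalDevicePushDescriptorPropertiesKHR pushDescriptorProps;
+  //   vk::PhysicalDeviceProperties2 props2;
+  //   props2.pNext = &pushDescriptorProps;
+  //   vkGetPhysicalDeviceProperties2KHR( physicalDevice,
+  //                                      reinterpret_cast<VkPhysicalDeviceProperties2KHR*>( &props2 ) );
+  //   // pushDescriptorProps.maxPushDescriptors now holds the device limit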
); + + struct PresentRegionsKHR + { + PresentRegionsKHR( uint32_t swapchainCount_ = 0, + const PresentRegionKHR* pRegions_ = nullptr ) + : swapchainCount( swapchainCount_ ) + , pRegions( pRegions_ ) + { + } + + PresentRegionsKHR( VkPresentRegionsKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( PresentRegionsKHR ) ); + } + + PresentRegionsKHR& operator=( VkPresentRegionsKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( PresentRegionsKHR ) ); + return *this; + } + PresentRegionsKHR& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PresentRegionsKHR& setSwapchainCount( uint32_t swapchainCount_ ) + { + swapchainCount = swapchainCount_; + return *this; + } + + PresentRegionsKHR& setPRegions( const PresentRegionKHR* pRegions_ ) + { + pRegions = pRegions_; + return *this; + } + + operator VkPresentRegionsKHR const&() const + { + return *reinterpret_cast(this); + } + + operator VkPresentRegionsKHR &() + { + return *reinterpret_cast(this); + } + + bool operator==( PresentRegionsKHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( swapchainCount == rhs.swapchainCount ) + && ( pRegions == rhs.pRegions ); + } + + bool operator!=( PresentRegionsKHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePresentRegionsKHR; + + public: + const void* pNext = nullptr; + uint32_t swapchainCount; + const PresentRegionKHR* pRegions; + }; + static_assert( sizeof( PresentRegionsKHR ) == sizeof( VkPresentRegionsKHR ), "struct and wrapper have different size!" ); + + struct PhysicalDeviceVariablePointerFeatures + { + PhysicalDeviceVariablePointerFeatures( Bool32 variablePointersStorageBuffer_ = 0, + Bool32 variablePointers_ = 0 ) + : variablePointersStorageBuffer( variablePointersStorageBuffer_ ) + , variablePointers( variablePointers_ ) + { + } + + PhysicalDeviceVariablePointerFeatures( VkPhysicalDeviceVariablePointerFeatures const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceVariablePointerFeatures ) ); + } + + PhysicalDeviceVariablePointerFeatures& operator=( VkPhysicalDeviceVariablePointerFeatures const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceVariablePointerFeatures ) ); + return *this; + } + PhysicalDeviceVariablePointerFeatures& setPNext( void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceVariablePointerFeatures& setVariablePointersStorageBuffer( Bool32 variablePointersStorageBuffer_ ) + { + variablePointersStorageBuffer = variablePointersStorageBuffer_; + return *this; + } + + PhysicalDeviceVariablePointerFeatures& setVariablePointers( Bool32 variablePointers_ ) + { + variablePointers = variablePointers_; + return *this; + } + + operator VkPhysicalDeviceVariablePointerFeatures const&() const + { + return *reinterpret_cast(this); + } + + operator VkPhysicalDeviceVariablePointerFeatures &() + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDeviceVariablePointerFeatures const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( variablePointersStorageBuffer == rhs.variablePointersStorageBuffer ) + && ( variablePointers == rhs.variablePointers ); + } + + bool operator!=( PhysicalDeviceVariablePointerFeatures const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePhysicalDeviceVariablePointerFeatures; + + public: + void* pNext = nullptr; + Bool32 variablePointersStorageBuffer; + Bool32 variablePointers; + }; + static_assert( sizeof( 
PhysicalDeviceVariablePointerFeatures ) == sizeof( VkPhysicalDeviceVariablePointerFeatures ), "struct and wrapper have different size!" ); + + using PhysicalDeviceVariablePointerFeaturesKHR = PhysicalDeviceVariablePointerFeatures; + + struct PhysicalDeviceIDProperties + { + operator VkPhysicalDeviceIDProperties const&() const + { + return *reinterpret_cast(this); + } + + operator VkPhysicalDeviceIDProperties &() + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDeviceIDProperties const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( memcmp( deviceUUID, rhs.deviceUUID, VK_UUID_SIZE * sizeof( uint8_t ) ) == 0 ) + && ( memcmp( driverUUID, rhs.driverUUID, VK_UUID_SIZE * sizeof( uint8_t ) ) == 0 ) + && ( memcmp( deviceLUID, rhs.deviceLUID, VK_LUID_SIZE * sizeof( uint8_t ) ) == 0 ) + && ( deviceNodeMask == rhs.deviceNodeMask ) + && ( deviceLUIDValid == rhs.deviceLUIDValid ); + } + + bool operator!=( PhysicalDeviceIDProperties const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePhysicalDeviceIdProperties; + + public: + void* pNext = nullptr; + uint8_t deviceUUID[VK_UUID_SIZE]; + uint8_t driverUUID[VK_UUID_SIZE]; + uint8_t deviceLUID[VK_LUID_SIZE]; + uint32_t deviceNodeMask; + Bool32 deviceLUIDValid; + }; + static_assert( sizeof( PhysicalDeviceIDProperties ) == sizeof( VkPhysicalDeviceIDProperties ), "struct and wrapper have different size!" ); + + using PhysicalDeviceIDPropertiesKHR = PhysicalDeviceIDProperties; + +#ifdef VK_USE_PLATFORM_WIN32_KHR + struct ExportMemoryWin32HandleInfoKHR + { + ExportMemoryWin32HandleInfoKHR( const SECURITY_ATTRIBUTES* pAttributes_ = nullptr, + DWORD dwAccess_ = 0, + LPCWSTR name_ = 0 ) + : pAttributes( pAttributes_ ) + , dwAccess( dwAccess_ ) + , name( name_ ) + { + } + + ExportMemoryWin32HandleInfoKHR( VkExportMemoryWin32HandleInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( ExportMemoryWin32HandleInfoKHR ) ); + } + + ExportMemoryWin32HandleInfoKHR& operator=( VkExportMemoryWin32HandleInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( ExportMemoryWin32HandleInfoKHR ) ); + return *this; + } + ExportMemoryWin32HandleInfoKHR& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + ExportMemoryWin32HandleInfoKHR& setPAttributes( const SECURITY_ATTRIBUTES* pAttributes_ ) + { + pAttributes = pAttributes_; + return *this; + } + + ExportMemoryWin32HandleInfoKHR& setDwAccess( DWORD dwAccess_ ) + { + dwAccess = dwAccess_; + return *this; + } + + ExportMemoryWin32HandleInfoKHR& setName( LPCWSTR name_ ) + { + name = name_; + return *this; + } + + operator VkExportMemoryWin32HandleInfoKHR const&() const + { + return *reinterpret_cast(this); + } + + operator VkExportMemoryWin32HandleInfoKHR &() + { + return *reinterpret_cast(this); + } + + bool operator==( ExportMemoryWin32HandleInfoKHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( pAttributes == rhs.pAttributes ) + && ( dwAccess == rhs.dwAccess ) + && ( name == rhs.name ); + } + + bool operator!=( ExportMemoryWin32HandleInfoKHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eExportMemoryWin32HandleInfoKHR; + + public: + const void* pNext = nullptr; + const SECURITY_ATTRIBUTES* pAttributes; + DWORD dwAccess; + LPCWSTR name; + }; + static_assert( sizeof( ExportMemoryWin32HandleInfoKHR ) == sizeof( VkExportMemoryWin32HandleInfoKHR ), "struct and wrapper have different size!" 
); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#ifdef VK_USE_PLATFORM_WIN32_KHR + struct MemoryWin32HandlePropertiesKHR + { + operator VkMemoryWin32HandlePropertiesKHR const&() const + { + return *reinterpret_cast(this); + } + + operator VkMemoryWin32HandlePropertiesKHR &() + { + return *reinterpret_cast(this); + } + + bool operator==( MemoryWin32HandlePropertiesKHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( memoryTypeBits == rhs.memoryTypeBits ); + } + + bool operator!=( MemoryWin32HandlePropertiesKHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eMemoryWin32HandlePropertiesKHR; + + public: + void* pNext = nullptr; + uint32_t memoryTypeBits; + }; + static_assert( sizeof( MemoryWin32HandlePropertiesKHR ) == sizeof( VkMemoryWin32HandlePropertiesKHR ), "struct and wrapper have different size!" ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + struct MemoryFdPropertiesKHR + { + operator VkMemoryFdPropertiesKHR const&() const + { + return *reinterpret_cast(this); + } + + operator VkMemoryFdPropertiesKHR &() + { + return *reinterpret_cast(this); + } + + bool operator==( MemoryFdPropertiesKHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( memoryTypeBits == rhs.memoryTypeBits ); + } + + bool operator!=( MemoryFdPropertiesKHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eMemoryFdPropertiesKHR; + + public: + void* pNext = nullptr; + uint32_t memoryTypeBits; + }; + static_assert( sizeof( MemoryFdPropertiesKHR ) == sizeof( VkMemoryFdPropertiesKHR ), "struct and wrapper have different size!" ); + +#ifdef VK_USE_PLATFORM_WIN32_KHR + struct Win32KeyedMutexAcquireReleaseInfoKHR + { + Win32KeyedMutexAcquireReleaseInfoKHR( uint32_t acquireCount_ = 0, + const DeviceMemory* pAcquireSyncs_ = nullptr, + const uint64_t* pAcquireKeys_ = nullptr, + const uint32_t* pAcquireTimeouts_ = nullptr, + uint32_t releaseCount_ = 0, + const DeviceMemory* pReleaseSyncs_ = nullptr, + const uint64_t* pReleaseKeys_ = nullptr ) + : acquireCount( acquireCount_ ) + , pAcquireSyncs( pAcquireSyncs_ ) + , pAcquireKeys( pAcquireKeys_ ) + , pAcquireTimeouts( pAcquireTimeouts_ ) + , releaseCount( releaseCount_ ) + , pReleaseSyncs( pReleaseSyncs_ ) + , pReleaseKeys( pReleaseKeys_ ) + { + } + + Win32KeyedMutexAcquireReleaseInfoKHR( VkWin32KeyedMutexAcquireReleaseInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( Win32KeyedMutexAcquireReleaseInfoKHR ) ); + } + + Win32KeyedMutexAcquireReleaseInfoKHR& operator=( VkWin32KeyedMutexAcquireReleaseInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( Win32KeyedMutexAcquireReleaseInfoKHR ) ); + return *this; + } + Win32KeyedMutexAcquireReleaseInfoKHR& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + Win32KeyedMutexAcquireReleaseInfoKHR& setAcquireCount( uint32_t acquireCount_ ) + { + acquireCount = acquireCount_; + return *this; + } + + Win32KeyedMutexAcquireReleaseInfoKHR& setPAcquireSyncs( const DeviceMemory* pAcquireSyncs_ ) + { + pAcquireSyncs = pAcquireSyncs_; + return *this; + } + + Win32KeyedMutexAcquireReleaseInfoKHR& setPAcquireKeys( const uint64_t* pAcquireKeys_ ) + { + pAcquireKeys = pAcquireKeys_; + return *this; + } + + Win32KeyedMutexAcquireReleaseInfoKHR& setPAcquireTimeouts( const uint32_t* pAcquireTimeouts_ ) + { + pAcquireTimeouts = pAcquireTimeouts_; + return *this; + } + + Win32KeyedMutexAcquireReleaseInfoKHR& setReleaseCount( uint32_t 
releaseCount_ ) + { + releaseCount = releaseCount_; + return *this; + } + + Win32KeyedMutexAcquireReleaseInfoKHR& setPReleaseSyncs( const DeviceMemory* pReleaseSyncs_ ) + { + pReleaseSyncs = pReleaseSyncs_; + return *this; + } + + Win32KeyedMutexAcquireReleaseInfoKHR& setPReleaseKeys( const uint64_t* pReleaseKeys_ ) + { + pReleaseKeys = pReleaseKeys_; + return *this; + } + + operator VkWin32KeyedMutexAcquireReleaseInfoKHR const&() const + { + return *reinterpret_cast(this); + } + + operator VkWin32KeyedMutexAcquireReleaseInfoKHR &() + { + return *reinterpret_cast(this); + } + + bool operator==( Win32KeyedMutexAcquireReleaseInfoKHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( acquireCount == rhs.acquireCount ) + && ( pAcquireSyncs == rhs.pAcquireSyncs ) + && ( pAcquireKeys == rhs.pAcquireKeys ) + && ( pAcquireTimeouts == rhs.pAcquireTimeouts ) + && ( releaseCount == rhs.releaseCount ) + && ( pReleaseSyncs == rhs.pReleaseSyncs ) + && ( pReleaseKeys == rhs.pReleaseKeys ); + } + + bool operator!=( Win32KeyedMutexAcquireReleaseInfoKHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eWin32KeyedMutexAcquireReleaseInfoKHR; + + public: + const void* pNext = nullptr; + uint32_t acquireCount; + const DeviceMemory* pAcquireSyncs; + const uint64_t* pAcquireKeys; + const uint32_t* pAcquireTimeouts; + uint32_t releaseCount; + const DeviceMemory* pReleaseSyncs; + const uint64_t* pReleaseKeys; + }; + static_assert( sizeof( Win32KeyedMutexAcquireReleaseInfoKHR ) == sizeof( VkWin32KeyedMutexAcquireReleaseInfoKHR ), "struct and wrapper have different size!" ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#ifdef VK_USE_PLATFORM_WIN32_KHR + struct ExportSemaphoreWin32HandleInfoKHR + { + ExportSemaphoreWin32HandleInfoKHR( const SECURITY_ATTRIBUTES* pAttributes_ = nullptr, + DWORD dwAccess_ = 0, + LPCWSTR name_ = 0 ) + : pAttributes( pAttributes_ ) + , dwAccess( dwAccess_ ) + , name( name_ ) + { + } + + ExportSemaphoreWin32HandleInfoKHR( VkExportSemaphoreWin32HandleInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( ExportSemaphoreWin32HandleInfoKHR ) ); + } + + ExportSemaphoreWin32HandleInfoKHR& operator=( VkExportSemaphoreWin32HandleInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( ExportSemaphoreWin32HandleInfoKHR ) ); + return *this; + } + ExportSemaphoreWin32HandleInfoKHR& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + ExportSemaphoreWin32HandleInfoKHR& setPAttributes( const SECURITY_ATTRIBUTES* pAttributes_ ) + { + pAttributes = pAttributes_; + return *this; + } + + ExportSemaphoreWin32HandleInfoKHR& setDwAccess( DWORD dwAccess_ ) + { + dwAccess = dwAccess_; + return *this; + } + + ExportSemaphoreWin32HandleInfoKHR& setName( LPCWSTR name_ ) + { + name = name_; + return *this; + } + + operator VkExportSemaphoreWin32HandleInfoKHR const&() const + { + return *reinterpret_cast(this); + } + + operator VkExportSemaphoreWin32HandleInfoKHR &() + { + return *reinterpret_cast(this); + } + + bool operator==( ExportSemaphoreWin32HandleInfoKHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( pAttributes == rhs.pAttributes ) + && ( dwAccess == rhs.dwAccess ) + && ( name == rhs.name ); + } + + bool operator!=( ExportSemaphoreWin32HandleInfoKHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eExportSemaphoreWin32HandleInfoKHR; + + public: + const void* pNext = nullptr; + const 
SECURITY_ATTRIBUTES* pAttributes; + DWORD dwAccess; + LPCWSTR name; + }; + static_assert( sizeof( ExportSemaphoreWin32HandleInfoKHR ) == sizeof( VkExportSemaphoreWin32HandleInfoKHR ), "struct and wrapper have different size!" ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#ifdef VK_USE_PLATFORM_WIN32_KHR + struct D3D12FenceSubmitInfoKHR + { + D3D12FenceSubmitInfoKHR( uint32_t waitSemaphoreValuesCount_ = 0, + const uint64_t* pWaitSemaphoreValues_ = nullptr, + uint32_t signalSemaphoreValuesCount_ = 0, + const uint64_t* pSignalSemaphoreValues_ = nullptr ) + : waitSemaphoreValuesCount( waitSemaphoreValuesCount_ ) + , pWaitSemaphoreValues( pWaitSemaphoreValues_ ) + , signalSemaphoreValuesCount( signalSemaphoreValuesCount_ ) + , pSignalSemaphoreValues( pSignalSemaphoreValues_ ) + { + } + + D3D12FenceSubmitInfoKHR( VkD3D12FenceSubmitInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( D3D12FenceSubmitInfoKHR ) ); + } + + D3D12FenceSubmitInfoKHR& operator=( VkD3D12FenceSubmitInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( D3D12FenceSubmitInfoKHR ) ); + return *this; + } + D3D12FenceSubmitInfoKHR& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + D3D12FenceSubmitInfoKHR& setWaitSemaphoreValuesCount( uint32_t waitSemaphoreValuesCount_ ) + { + waitSemaphoreValuesCount = waitSemaphoreValuesCount_; + return *this; + } + + D3D12FenceSubmitInfoKHR& setPWaitSemaphoreValues( const uint64_t* pWaitSemaphoreValues_ ) + { + pWaitSemaphoreValues = pWaitSemaphoreValues_; + return *this; + } + + D3D12FenceSubmitInfoKHR& setSignalSemaphoreValuesCount( uint32_t signalSemaphoreValuesCount_ ) + { + signalSemaphoreValuesCount = signalSemaphoreValuesCount_; + return *this; + } + + D3D12FenceSubmitInfoKHR& setPSignalSemaphoreValues( const uint64_t* pSignalSemaphoreValues_ ) + { + pSignalSemaphoreValues = pSignalSemaphoreValues_; + return *this; + } + + operator VkD3D12FenceSubmitInfoKHR const&() const + { + return *reinterpret_cast(this); + } + + operator VkD3D12FenceSubmitInfoKHR &() + { + return *reinterpret_cast(this); + } + + bool operator==( D3D12FenceSubmitInfoKHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( waitSemaphoreValuesCount == rhs.waitSemaphoreValuesCount ) + && ( pWaitSemaphoreValues == rhs.pWaitSemaphoreValues ) + && ( signalSemaphoreValuesCount == rhs.signalSemaphoreValuesCount ) + && ( pSignalSemaphoreValues == rhs.pSignalSemaphoreValues ); + } + + bool operator!=( D3D12FenceSubmitInfoKHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eD3D12FenceSubmitInfoKHR; + + public: + const void* pNext = nullptr; + uint32_t waitSemaphoreValuesCount; + const uint64_t* pWaitSemaphoreValues; + uint32_t signalSemaphoreValuesCount; + const uint64_t* pSignalSemaphoreValues; + }; + static_assert( sizeof( D3D12FenceSubmitInfoKHR ) == sizeof( VkD3D12FenceSubmitInfoKHR ), "struct and wrapper have different size!" 
); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#ifdef VK_USE_PLATFORM_WIN32_KHR + struct ExportFenceWin32HandleInfoKHR + { + ExportFenceWin32HandleInfoKHR( const SECURITY_ATTRIBUTES* pAttributes_ = nullptr, + DWORD dwAccess_ = 0, + LPCWSTR name_ = 0 ) + : pAttributes( pAttributes_ ) + , dwAccess( dwAccess_ ) + , name( name_ ) + { + } + + ExportFenceWin32HandleInfoKHR( VkExportFenceWin32HandleInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( ExportFenceWin32HandleInfoKHR ) ); + } + + ExportFenceWin32HandleInfoKHR& operator=( VkExportFenceWin32HandleInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( ExportFenceWin32HandleInfoKHR ) ); + return *this; + } + ExportFenceWin32HandleInfoKHR& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + ExportFenceWin32HandleInfoKHR& setPAttributes( const SECURITY_ATTRIBUTES* pAttributes_ ) + { + pAttributes = pAttributes_; + return *this; + } + + ExportFenceWin32HandleInfoKHR& setDwAccess( DWORD dwAccess_ ) + { + dwAccess = dwAccess_; + return *this; + } + + ExportFenceWin32HandleInfoKHR& setName( LPCWSTR name_ ) + { + name = name_; + return *this; + } + + operator VkExportFenceWin32HandleInfoKHR const&() const + { + return *reinterpret_cast(this); + } + + operator VkExportFenceWin32HandleInfoKHR &() + { + return *reinterpret_cast(this); + } + + bool operator==( ExportFenceWin32HandleInfoKHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( pAttributes == rhs.pAttributes ) + && ( dwAccess == rhs.dwAccess ) + && ( name == rhs.name ); + } + + bool operator!=( ExportFenceWin32HandleInfoKHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eExportFenceWin32HandleInfoKHR; + + public: + const void* pNext = nullptr; + const SECURITY_ATTRIBUTES* pAttributes; + DWORD dwAccess; + LPCWSTR name; + }; + static_assert( sizeof( ExportFenceWin32HandleInfoKHR ) == sizeof( VkExportFenceWin32HandleInfoKHR ), "struct and wrapper have different size!" 
); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + struct PhysicalDeviceMultiviewFeatures + { + PhysicalDeviceMultiviewFeatures( Bool32 multiview_ = 0, + Bool32 multiviewGeometryShader_ = 0, + Bool32 multiviewTessellationShader_ = 0 ) + : multiview( multiview_ ) + , multiviewGeometryShader( multiviewGeometryShader_ ) + , multiviewTessellationShader( multiviewTessellationShader_ ) + { + } + + PhysicalDeviceMultiviewFeatures( VkPhysicalDeviceMultiviewFeatures const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceMultiviewFeatures ) ); + } + + PhysicalDeviceMultiviewFeatures& operator=( VkPhysicalDeviceMultiviewFeatures const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceMultiviewFeatures ) ); + return *this; + } + PhysicalDeviceMultiviewFeatures& setPNext( void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceMultiviewFeatures& setMultiview( Bool32 multiview_ ) + { + multiview = multiview_; + return *this; + } + + PhysicalDeviceMultiviewFeatures& setMultiviewGeometryShader( Bool32 multiviewGeometryShader_ ) + { + multiviewGeometryShader = multiviewGeometryShader_; + return *this; + } + + PhysicalDeviceMultiviewFeatures& setMultiviewTessellationShader( Bool32 multiviewTessellationShader_ ) + { + multiviewTessellationShader = multiviewTessellationShader_; + return *this; + } + + operator VkPhysicalDeviceMultiviewFeatures const&() const + { + return *reinterpret_cast(this); + } + + operator VkPhysicalDeviceMultiviewFeatures &() + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDeviceMultiviewFeatures const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( multiview == rhs.multiview ) + && ( multiviewGeometryShader == rhs.multiviewGeometryShader ) + && ( multiviewTessellationShader == rhs.multiviewTessellationShader ); + } + + bool operator!=( PhysicalDeviceMultiviewFeatures const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePhysicalDeviceMultiviewFeatures; + + public: + void* pNext = nullptr; + Bool32 multiview; + Bool32 multiviewGeometryShader; + Bool32 multiviewTessellationShader; + }; + static_assert( sizeof( PhysicalDeviceMultiviewFeatures ) == sizeof( VkPhysicalDeviceMultiviewFeatures ), "struct and wrapper have different size!" ); + + using PhysicalDeviceMultiviewFeaturesKHR = PhysicalDeviceMultiviewFeatures; + + struct PhysicalDeviceMultiviewProperties + { + operator VkPhysicalDeviceMultiviewProperties const&() const + { + return *reinterpret_cast(this); + } + + operator VkPhysicalDeviceMultiviewProperties &() + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDeviceMultiviewProperties const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( maxMultiviewViewCount == rhs.maxMultiviewViewCount ) + && ( maxMultiviewInstanceIndex == rhs.maxMultiviewInstanceIndex ); + } + + bool operator!=( PhysicalDeviceMultiviewProperties const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePhysicalDeviceMultiviewProperties; + + public: + void* pNext = nullptr; + uint32_t maxMultiviewViewCount; + uint32_t maxMultiviewInstanceIndex; + }; + static_assert( sizeof( PhysicalDeviceMultiviewProperties ) == sizeof( VkPhysicalDeviceMultiviewProperties ), "struct and wrapper have different size!" 
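+  // Illustrative sketch: the multiview feature is enabled at device creation by chaining
+  // PhysicalDeviceMultiviewFeatures into DeviceCreateInfo (defined elsewhere in this
+  // header); deviceCreateInfo is a placeholder:
+  //   vk::PhysicalDeviceMultiviewFeatures multiviewFeatures;
+  //   multiviewFeatures.setMultiview( VK_TRUE );
+  //   deviceCreateInfo.setPNext( &multiviewFeatures );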
); + + using PhysicalDeviceMultiviewPropertiesKHR = PhysicalDeviceMultiviewProperties; + + struct RenderPassMultiviewCreateInfo + { + RenderPassMultiviewCreateInfo( uint32_t subpassCount_ = 0, + const uint32_t* pViewMasks_ = nullptr, + uint32_t dependencyCount_ = 0, + const int32_t* pViewOffsets_ = nullptr, + uint32_t correlationMaskCount_ = 0, + const uint32_t* pCorrelationMasks_ = nullptr ) + : subpassCount( subpassCount_ ) + , pViewMasks( pViewMasks_ ) + , dependencyCount( dependencyCount_ ) + , pViewOffsets( pViewOffsets_ ) + , correlationMaskCount( correlationMaskCount_ ) + , pCorrelationMasks( pCorrelationMasks_ ) + { + } + + RenderPassMultiviewCreateInfo( VkRenderPassMultiviewCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( RenderPassMultiviewCreateInfo ) ); + } + + RenderPassMultiviewCreateInfo& operator=( VkRenderPassMultiviewCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( RenderPassMultiviewCreateInfo ) ); + return *this; + } + RenderPassMultiviewCreateInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + RenderPassMultiviewCreateInfo& setSubpassCount( uint32_t subpassCount_ ) + { + subpassCount = subpassCount_; + return *this; + } + + RenderPassMultiviewCreateInfo& setPViewMasks( const uint32_t* pViewMasks_ ) + { + pViewMasks = pViewMasks_; + return *this; + } + + RenderPassMultiviewCreateInfo& setDependencyCount( uint32_t dependencyCount_ ) + { + dependencyCount = dependencyCount_; + return *this; + } + + RenderPassMultiviewCreateInfo& setPViewOffsets( const int32_t* pViewOffsets_ ) + { + pViewOffsets = pViewOffsets_; + return *this; + } + + RenderPassMultiviewCreateInfo& setCorrelationMaskCount( uint32_t correlationMaskCount_ ) + { + correlationMaskCount = correlationMaskCount_; + return *this; + } + + RenderPassMultiviewCreateInfo& setPCorrelationMasks( const uint32_t* pCorrelationMasks_ ) + { + pCorrelationMasks = pCorrelationMasks_; + return *this; + } + + operator VkRenderPassMultiviewCreateInfo const&() const + { + return *reinterpret_cast(this); + } + + operator VkRenderPassMultiviewCreateInfo &() + { + return *reinterpret_cast(this); + } + + bool operator==( RenderPassMultiviewCreateInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( subpassCount == rhs.subpassCount ) + && ( pViewMasks == rhs.pViewMasks ) + && ( dependencyCount == rhs.dependencyCount ) + && ( pViewOffsets == rhs.pViewOffsets ) + && ( correlationMaskCount == rhs.correlationMaskCount ) + && ( pCorrelationMasks == rhs.pCorrelationMasks ); + } + + bool operator!=( RenderPassMultiviewCreateInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eRenderPassMultiviewCreateInfo; + + public: + const void* pNext = nullptr; + uint32_t subpassCount; + const uint32_t* pViewMasks; + uint32_t dependencyCount; + const int32_t* pViewOffsets; + uint32_t correlationMaskCount; + const uint32_t* pCorrelationMasks; + }; + static_assert( sizeof( RenderPassMultiviewCreateInfo ) == sizeof( VkRenderPassMultiviewCreateInfo ), "struct and wrapper have different size!" 
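+  // Illustrative sketch: each bit of a view mask selects one view rendered by the
+  // corresponding subpass; the struct is chained into RenderPassCreateInfo (defined
+  // elsewhere in this header), and renderPassCreateInfo is a placeholder:
+  //   const uint32_t viewMask = 0x3;   // subpass 0 renders to views 0 and 1
+  //   vk::RenderPassMultiviewCreateInfo multiviewInfo;
+  //   multiviewInfo.setSubpassCount( 1 ).setPViewMasks( &viewMask );
+  //   renderPassCreateInfo.setPNext( &multiviewInfo );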
); + + using RenderPassMultiviewCreateInfoKHR = RenderPassMultiviewCreateInfo; + + struct BindBufferMemoryInfo + { + BindBufferMemoryInfo( Buffer buffer_ = Buffer(), + DeviceMemory memory_ = DeviceMemory(), + DeviceSize memoryOffset_ = 0 ) + : buffer( buffer_ ) + , memory( memory_ ) + , memoryOffset( memoryOffset_ ) + { + } + + BindBufferMemoryInfo( VkBindBufferMemoryInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( BindBufferMemoryInfo ) ); + } + + BindBufferMemoryInfo& operator=( VkBindBufferMemoryInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( BindBufferMemoryInfo ) ); + return *this; + } + BindBufferMemoryInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + BindBufferMemoryInfo& setBuffer( Buffer buffer_ ) + { + buffer = buffer_; + return *this; + } + + BindBufferMemoryInfo& setMemory( DeviceMemory memory_ ) + { + memory = memory_; + return *this; + } + + BindBufferMemoryInfo& setMemoryOffset( DeviceSize memoryOffset_ ) + { + memoryOffset = memoryOffset_; + return *this; + } + + operator VkBindBufferMemoryInfo const&() const + { + return *reinterpret_cast(this); + } + + operator VkBindBufferMemoryInfo &() + { + return *reinterpret_cast(this); + } + + bool operator==( BindBufferMemoryInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( buffer == rhs.buffer ) + && ( memory == rhs.memory ) + && ( memoryOffset == rhs.memoryOffset ); + } + + bool operator!=( BindBufferMemoryInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eBindBufferMemoryInfo; + + public: + const void* pNext = nullptr; + Buffer buffer; + DeviceMemory memory; + DeviceSize memoryOffset; + }; + static_assert( sizeof( BindBufferMemoryInfo ) == sizeof( VkBindBufferMemoryInfo ), "struct and wrapper have different size!" 
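  // Usage sketch for BindBufferMemoryInfo (illustrative; assumes valid vk::Device
  // device, vk::Buffer buffer and vk::DeviceMemory memory handles): the batched
  // replacement for vkBindBufferMemory.
  //
  //   vk::BindBufferMemoryInfo bindInfo( buffer, memory, 0 /*memoryOffset*/ );
  //   device.bindBufferMemory2( bindInfo );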
); + + using BindBufferMemoryInfoKHR = BindBufferMemoryInfo; + + struct BindBufferMemoryDeviceGroupInfo + { + BindBufferMemoryDeviceGroupInfo( uint32_t deviceIndexCount_ = 0, + const uint32_t* pDeviceIndices_ = nullptr ) + : deviceIndexCount( deviceIndexCount_ ) + , pDeviceIndices( pDeviceIndices_ ) + { + } + + BindBufferMemoryDeviceGroupInfo( VkBindBufferMemoryDeviceGroupInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( BindBufferMemoryDeviceGroupInfo ) ); + } + + BindBufferMemoryDeviceGroupInfo& operator=( VkBindBufferMemoryDeviceGroupInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( BindBufferMemoryDeviceGroupInfo ) ); + return *this; + } + BindBufferMemoryDeviceGroupInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + BindBufferMemoryDeviceGroupInfo& setDeviceIndexCount( uint32_t deviceIndexCount_ ) + { + deviceIndexCount = deviceIndexCount_; + return *this; + } + + BindBufferMemoryDeviceGroupInfo& setPDeviceIndices( const uint32_t* pDeviceIndices_ ) + { + pDeviceIndices = pDeviceIndices_; + return *this; + } + + operator VkBindBufferMemoryDeviceGroupInfo const&() const + { + return *reinterpret_cast(this); + } + + operator VkBindBufferMemoryDeviceGroupInfo &() + { + return *reinterpret_cast(this); + } + + bool operator==( BindBufferMemoryDeviceGroupInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( deviceIndexCount == rhs.deviceIndexCount ) + && ( pDeviceIndices == rhs.pDeviceIndices ); + } + + bool operator!=( BindBufferMemoryDeviceGroupInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eBindBufferMemoryDeviceGroupInfo; + + public: + const void* pNext = nullptr; + uint32_t deviceIndexCount; + const uint32_t* pDeviceIndices; + }; + static_assert( sizeof( BindBufferMemoryDeviceGroupInfo ) == sizeof( VkBindBufferMemoryDeviceGroupInfo ), "struct and wrapper have different size!" 
); + + using BindBufferMemoryDeviceGroupInfoKHR = BindBufferMemoryDeviceGroupInfo; + + struct BindImageMemoryInfo + { + BindImageMemoryInfo( Image image_ = Image(), + DeviceMemory memory_ = DeviceMemory(), + DeviceSize memoryOffset_ = 0 ) + : image( image_ ) + , memory( memory_ ) + , memoryOffset( memoryOffset_ ) + { + } + + BindImageMemoryInfo( VkBindImageMemoryInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( BindImageMemoryInfo ) ); + } + + BindImageMemoryInfo& operator=( VkBindImageMemoryInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( BindImageMemoryInfo ) ); + return *this; + } + BindImageMemoryInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + BindImageMemoryInfo& setImage( Image image_ ) + { + image = image_; + return *this; + } + + BindImageMemoryInfo& setMemory( DeviceMemory memory_ ) + { + memory = memory_; + return *this; + } + + BindImageMemoryInfo& setMemoryOffset( DeviceSize memoryOffset_ ) + { + memoryOffset = memoryOffset_; + return *this; + } + + operator VkBindImageMemoryInfo const&() const + { + return *reinterpret_cast(this); + } + + operator VkBindImageMemoryInfo &() + { + return *reinterpret_cast(this); + } + + bool operator==( BindImageMemoryInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( image == rhs.image ) + && ( memory == rhs.memory ) + && ( memoryOffset == rhs.memoryOffset ); + } + + bool operator!=( BindImageMemoryInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eBindImageMemoryInfo; + + public: + const void* pNext = nullptr; + Image image; + DeviceMemory memory; + DeviceSize memoryOffset; + }; + static_assert( sizeof( BindImageMemoryInfo ) == sizeof( VkBindImageMemoryInfo ), "struct and wrapper have different size!" 
); + + using BindImageMemoryInfoKHR = BindImageMemoryInfo; + + struct BindImageMemoryDeviceGroupInfo + { + BindImageMemoryDeviceGroupInfo( uint32_t deviceIndexCount_ = 0, + const uint32_t* pDeviceIndices_ = nullptr, + uint32_t splitInstanceBindRegionCount_ = 0, + const Rect2D* pSplitInstanceBindRegions_ = nullptr ) + : deviceIndexCount( deviceIndexCount_ ) + , pDeviceIndices( pDeviceIndices_ ) + , splitInstanceBindRegionCount( splitInstanceBindRegionCount_ ) + , pSplitInstanceBindRegions( pSplitInstanceBindRegions_ ) + { + } + + BindImageMemoryDeviceGroupInfo( VkBindImageMemoryDeviceGroupInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( BindImageMemoryDeviceGroupInfo ) ); + } + + BindImageMemoryDeviceGroupInfo& operator=( VkBindImageMemoryDeviceGroupInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( BindImageMemoryDeviceGroupInfo ) ); + return *this; + } + BindImageMemoryDeviceGroupInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + BindImageMemoryDeviceGroupInfo& setDeviceIndexCount( uint32_t deviceIndexCount_ ) + { + deviceIndexCount = deviceIndexCount_; + return *this; + } + + BindImageMemoryDeviceGroupInfo& setPDeviceIndices( const uint32_t* pDeviceIndices_ ) + { + pDeviceIndices = pDeviceIndices_; + return *this; + } + + BindImageMemoryDeviceGroupInfo& setSplitInstanceBindRegionCount( uint32_t splitInstanceBindRegionCount_ ) + { + splitInstanceBindRegionCount = splitInstanceBindRegionCount_; + return *this; + } + + BindImageMemoryDeviceGroupInfo& setPSplitInstanceBindRegions( const Rect2D* pSplitInstanceBindRegions_ ) + { + pSplitInstanceBindRegions = pSplitInstanceBindRegions_; + return *this; + } + + operator VkBindImageMemoryDeviceGroupInfo const&() const + { + return *reinterpret_cast(this); + } + + operator VkBindImageMemoryDeviceGroupInfo &() + { + return *reinterpret_cast(this); + } + + bool operator==( BindImageMemoryDeviceGroupInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( deviceIndexCount == rhs.deviceIndexCount ) + && ( pDeviceIndices == rhs.pDeviceIndices ) + && ( splitInstanceBindRegionCount == rhs.splitInstanceBindRegionCount ) + && ( pSplitInstanceBindRegions == rhs.pSplitInstanceBindRegions ); + } + + bool operator!=( BindImageMemoryDeviceGroupInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eBindImageMemoryDeviceGroupInfo; + + public: + const void* pNext = nullptr; + uint32_t deviceIndexCount; + const uint32_t* pDeviceIndices; + uint32_t splitInstanceBindRegionCount; + const Rect2D* pSplitInstanceBindRegions; + }; + static_assert( sizeof( BindImageMemoryDeviceGroupInfo ) == sizeof( VkBindImageMemoryDeviceGroupInfo ), "struct and wrapper have different size!" 
); + + using BindImageMemoryDeviceGroupInfoKHR = BindImageMemoryDeviceGroupInfo; + + struct DeviceGroupRenderPassBeginInfo + { + DeviceGroupRenderPassBeginInfo( uint32_t deviceMask_ = 0, + uint32_t deviceRenderAreaCount_ = 0, + const Rect2D* pDeviceRenderAreas_ = nullptr ) + : deviceMask( deviceMask_ ) + , deviceRenderAreaCount( deviceRenderAreaCount_ ) + , pDeviceRenderAreas( pDeviceRenderAreas_ ) + { + } + + DeviceGroupRenderPassBeginInfo( VkDeviceGroupRenderPassBeginInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( DeviceGroupRenderPassBeginInfo ) ); + } + + DeviceGroupRenderPassBeginInfo& operator=( VkDeviceGroupRenderPassBeginInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( DeviceGroupRenderPassBeginInfo ) ); + return *this; + } + DeviceGroupRenderPassBeginInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + DeviceGroupRenderPassBeginInfo& setDeviceMask( uint32_t deviceMask_ ) + { + deviceMask = deviceMask_; + return *this; + } + + DeviceGroupRenderPassBeginInfo& setDeviceRenderAreaCount( uint32_t deviceRenderAreaCount_ ) + { + deviceRenderAreaCount = deviceRenderAreaCount_; + return *this; + } + + DeviceGroupRenderPassBeginInfo& setPDeviceRenderAreas( const Rect2D* pDeviceRenderAreas_ ) + { + pDeviceRenderAreas = pDeviceRenderAreas_; + return *this; + } + + operator VkDeviceGroupRenderPassBeginInfo const&() const + { + return *reinterpret_cast(this); + } + + operator VkDeviceGroupRenderPassBeginInfo &() + { + return *reinterpret_cast(this); + } + + bool operator==( DeviceGroupRenderPassBeginInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( deviceMask == rhs.deviceMask ) + && ( deviceRenderAreaCount == rhs.deviceRenderAreaCount ) + && ( pDeviceRenderAreas == rhs.pDeviceRenderAreas ); + } + + bool operator!=( DeviceGroupRenderPassBeginInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eDeviceGroupRenderPassBeginInfo; + + public: + const void* pNext = nullptr; + uint32_t deviceMask; + uint32_t deviceRenderAreaCount; + const Rect2D* pDeviceRenderAreas; + }; + static_assert( sizeof( DeviceGroupRenderPassBeginInfo ) == sizeof( VkDeviceGroupRenderPassBeginInfo ), "struct and wrapper have different size!" 
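  // Usage sketch for DeviceGroupRenderPassBeginInfo (illustrative; assumes a filled
  // vk::RenderPassBeginInfo beginInfo and a recording vk::CommandBuffer cmd):
  // restrict the render pass to device 0 of a device group via the pNext chain.
  //
  //   vk::DeviceGroupRenderPassBeginInfo deviceGroupBegin;
  //   deviceGroupBegin.setDeviceMask( 0x1 );
  //   beginInfo.setPNext( &deviceGroupBegin );
  //   cmd.beginRenderPass( beginInfo, vk::SubpassContents::eInline );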
); + + using DeviceGroupRenderPassBeginInfoKHR = DeviceGroupRenderPassBeginInfo; + + struct DeviceGroupCommandBufferBeginInfo + { + DeviceGroupCommandBufferBeginInfo( uint32_t deviceMask_ = 0 ) + : deviceMask( deviceMask_ ) + { + } + + DeviceGroupCommandBufferBeginInfo( VkDeviceGroupCommandBufferBeginInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( DeviceGroupCommandBufferBeginInfo ) ); + } + + DeviceGroupCommandBufferBeginInfo& operator=( VkDeviceGroupCommandBufferBeginInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( DeviceGroupCommandBufferBeginInfo ) ); + return *this; + } + DeviceGroupCommandBufferBeginInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + DeviceGroupCommandBufferBeginInfo& setDeviceMask( uint32_t deviceMask_ ) + { + deviceMask = deviceMask_; + return *this; + } + + operator VkDeviceGroupCommandBufferBeginInfo const&() const + { + return *reinterpret_cast(this); + } + + operator VkDeviceGroupCommandBufferBeginInfo &() + { + return *reinterpret_cast(this); + } + + bool operator==( DeviceGroupCommandBufferBeginInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( deviceMask == rhs.deviceMask ); + } + + bool operator!=( DeviceGroupCommandBufferBeginInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eDeviceGroupCommandBufferBeginInfo; + + public: + const void* pNext = nullptr; + uint32_t deviceMask; + }; + static_assert( sizeof( DeviceGroupCommandBufferBeginInfo ) == sizeof( VkDeviceGroupCommandBufferBeginInfo ), "struct and wrapper have different size!" ); + + using DeviceGroupCommandBufferBeginInfoKHR = DeviceGroupCommandBufferBeginInfo; + + struct DeviceGroupSubmitInfo + { + DeviceGroupSubmitInfo( uint32_t waitSemaphoreCount_ = 0, + const uint32_t* pWaitSemaphoreDeviceIndices_ = nullptr, + uint32_t commandBufferCount_ = 0, + const uint32_t* pCommandBufferDeviceMasks_ = nullptr, + uint32_t signalSemaphoreCount_ = 0, + const uint32_t* pSignalSemaphoreDeviceIndices_ = nullptr ) + : waitSemaphoreCount( waitSemaphoreCount_ ) + , pWaitSemaphoreDeviceIndices( pWaitSemaphoreDeviceIndices_ ) + , commandBufferCount( commandBufferCount_ ) + , pCommandBufferDeviceMasks( pCommandBufferDeviceMasks_ ) + , signalSemaphoreCount( signalSemaphoreCount_ ) + , pSignalSemaphoreDeviceIndices( pSignalSemaphoreDeviceIndices_ ) + { + } + + DeviceGroupSubmitInfo( VkDeviceGroupSubmitInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( DeviceGroupSubmitInfo ) ); + } + + DeviceGroupSubmitInfo& operator=( VkDeviceGroupSubmitInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( DeviceGroupSubmitInfo ) ); + return *this; + } + DeviceGroupSubmitInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + DeviceGroupSubmitInfo& setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ ) + { + waitSemaphoreCount = waitSemaphoreCount_; + return *this; + } + + DeviceGroupSubmitInfo& setPWaitSemaphoreDeviceIndices( const uint32_t* pWaitSemaphoreDeviceIndices_ ) + { + pWaitSemaphoreDeviceIndices = pWaitSemaphoreDeviceIndices_; + return *this; + } + + DeviceGroupSubmitInfo& setCommandBufferCount( uint32_t commandBufferCount_ ) + { + commandBufferCount = commandBufferCount_; + return *this; + } + + DeviceGroupSubmitInfo& setPCommandBufferDeviceMasks( const uint32_t* pCommandBufferDeviceMasks_ ) + { + pCommandBufferDeviceMasks = pCommandBufferDeviceMasks_; + return *this; + } + + DeviceGroupSubmitInfo& setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ ) + 
{ + signalSemaphoreCount = signalSemaphoreCount_; + return *this; + } + + DeviceGroupSubmitInfo& setPSignalSemaphoreDeviceIndices( const uint32_t* pSignalSemaphoreDeviceIndices_ ) + { + pSignalSemaphoreDeviceIndices = pSignalSemaphoreDeviceIndices_; + return *this; + } + + operator VkDeviceGroupSubmitInfo const&() const + { + return *reinterpret_cast(this); + } + + operator VkDeviceGroupSubmitInfo &() + { + return *reinterpret_cast(this); + } + + bool operator==( DeviceGroupSubmitInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( waitSemaphoreCount == rhs.waitSemaphoreCount ) + && ( pWaitSemaphoreDeviceIndices == rhs.pWaitSemaphoreDeviceIndices ) + && ( commandBufferCount == rhs.commandBufferCount ) + && ( pCommandBufferDeviceMasks == rhs.pCommandBufferDeviceMasks ) + && ( signalSemaphoreCount == rhs.signalSemaphoreCount ) + && ( pSignalSemaphoreDeviceIndices == rhs.pSignalSemaphoreDeviceIndices ); + } + + bool operator!=( DeviceGroupSubmitInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eDeviceGroupSubmitInfo; + + public: + const void* pNext = nullptr; + uint32_t waitSemaphoreCount; + const uint32_t* pWaitSemaphoreDeviceIndices; + uint32_t commandBufferCount; + const uint32_t* pCommandBufferDeviceMasks; + uint32_t signalSemaphoreCount; + const uint32_t* pSignalSemaphoreDeviceIndices; + }; + static_assert( sizeof( DeviceGroupSubmitInfo ) == sizeof( VkDeviceGroupSubmitInfo ), "struct and wrapper have different size!" ); + + using DeviceGroupSubmitInfoKHR = DeviceGroupSubmitInfo; + + struct DeviceGroupBindSparseInfo + { + DeviceGroupBindSparseInfo( uint32_t resourceDeviceIndex_ = 0, + uint32_t memoryDeviceIndex_ = 0 ) + : resourceDeviceIndex( resourceDeviceIndex_ ) + , memoryDeviceIndex( memoryDeviceIndex_ ) + { + } + + DeviceGroupBindSparseInfo( VkDeviceGroupBindSparseInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( DeviceGroupBindSparseInfo ) ); + } + + DeviceGroupBindSparseInfo& operator=( VkDeviceGroupBindSparseInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( DeviceGroupBindSparseInfo ) ); + return *this; + } + DeviceGroupBindSparseInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + DeviceGroupBindSparseInfo& setResourceDeviceIndex( uint32_t resourceDeviceIndex_ ) + { + resourceDeviceIndex = resourceDeviceIndex_; + return *this; + } + + DeviceGroupBindSparseInfo& setMemoryDeviceIndex( uint32_t memoryDeviceIndex_ ) + { + memoryDeviceIndex = memoryDeviceIndex_; + return *this; + } + + operator VkDeviceGroupBindSparseInfo const&() const + { + return *reinterpret_cast(this); + } + + operator VkDeviceGroupBindSparseInfo &() + { + return *reinterpret_cast(this); + } + + bool operator==( DeviceGroupBindSparseInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( resourceDeviceIndex == rhs.resourceDeviceIndex ) + && ( memoryDeviceIndex == rhs.memoryDeviceIndex ); + } + + bool operator!=( DeviceGroupBindSparseInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eDeviceGroupBindSparseInfo; + + public: + const void* pNext = nullptr; + uint32_t resourceDeviceIndex; + uint32_t memoryDeviceIndex; + }; + static_assert( sizeof( DeviceGroupBindSparseInfo ) == sizeof( VkDeviceGroupBindSparseInfo ), "struct and wrapper have different size!" 
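  // Usage sketch for DeviceGroupSubmitInfo (illustrative; assumes a filled
  // vk::SubmitInfo submitInfo with one command buffer and a valid vk::Queue queue):
  // direct the submission to device 0 of a device group via the pNext chain.
  //
  //   const uint32_t commandBufferDeviceMask = 0x1;
  //   vk::DeviceGroupSubmitInfo deviceGroupSubmit;
  //   deviceGroupSubmit.setCommandBufferCount( 1 )
  //                    .setPCommandBufferDeviceMasks( &commandBufferDeviceMask );
  //   submitInfo.setPNext( &deviceGroupSubmit );
  //   queue.submit( submitInfo, vk::Fence() );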
); + + using DeviceGroupBindSparseInfoKHR = DeviceGroupBindSparseInfo; + + struct ImageSwapchainCreateInfoKHR + { + ImageSwapchainCreateInfoKHR( SwapchainKHR swapchain_ = SwapchainKHR() ) + : swapchain( swapchain_ ) + { + } + + ImageSwapchainCreateInfoKHR( VkImageSwapchainCreateInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( ImageSwapchainCreateInfoKHR ) ); + } + + ImageSwapchainCreateInfoKHR& operator=( VkImageSwapchainCreateInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( ImageSwapchainCreateInfoKHR ) ); + return *this; + } + ImageSwapchainCreateInfoKHR& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + ImageSwapchainCreateInfoKHR& setSwapchain( SwapchainKHR swapchain_ ) + { + swapchain = swapchain_; + return *this; + } + + operator VkImageSwapchainCreateInfoKHR const&() const + { + return *reinterpret_cast(this); + } + + operator VkImageSwapchainCreateInfoKHR &() + { + return *reinterpret_cast(this); + } + + bool operator==( ImageSwapchainCreateInfoKHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( swapchain == rhs.swapchain ); + } + + bool operator!=( ImageSwapchainCreateInfoKHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eImageSwapchainCreateInfoKHR; + + public: + const void* pNext = nullptr; + SwapchainKHR swapchain; + }; + static_assert( sizeof( ImageSwapchainCreateInfoKHR ) == sizeof( VkImageSwapchainCreateInfoKHR ), "struct and wrapper have different size!" ); + + struct BindImageMemorySwapchainInfoKHR + { + BindImageMemorySwapchainInfoKHR( SwapchainKHR swapchain_ = SwapchainKHR(), + uint32_t imageIndex_ = 0 ) + : swapchain( swapchain_ ) + , imageIndex( imageIndex_ ) + { + } + + BindImageMemorySwapchainInfoKHR( VkBindImageMemorySwapchainInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( BindImageMemorySwapchainInfoKHR ) ); + } + + BindImageMemorySwapchainInfoKHR& operator=( VkBindImageMemorySwapchainInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( BindImageMemorySwapchainInfoKHR ) ); + return *this; + } + BindImageMemorySwapchainInfoKHR& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + BindImageMemorySwapchainInfoKHR& setSwapchain( SwapchainKHR swapchain_ ) + { + swapchain = swapchain_; + return *this; + } + + BindImageMemorySwapchainInfoKHR& setImageIndex( uint32_t imageIndex_ ) + { + imageIndex = imageIndex_; + return *this; + } + + operator VkBindImageMemorySwapchainInfoKHR const&() const + { + return *reinterpret_cast(this); + } + + operator VkBindImageMemorySwapchainInfoKHR &() + { + return *reinterpret_cast(this); + } + + bool operator==( BindImageMemorySwapchainInfoKHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( swapchain == rhs.swapchain ) + && ( imageIndex == rhs.imageIndex ); + } + + bool operator!=( BindImageMemorySwapchainInfoKHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eBindImageMemorySwapchainInfoKHR; + + public: + const void* pNext = nullptr; + SwapchainKHR swapchain; + uint32_t imageIndex; + }; + static_assert( sizeof( BindImageMemorySwapchainInfoKHR ) == sizeof( VkBindImageMemorySwapchainInfoKHR ), "struct and wrapper have different size!" 
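  // Usage sketch for BindImageMemorySwapchainInfoKHR (illustrative; assumes a valid
  // vk::Device device, a vk::Image image created with ImageSwapchainCreateInfoKHR in
  // its pNext chain, and a vk::SwapchainKHR swapchain): bind the image to
  // swapchain-owned memory instead of an explicit vk::DeviceMemory handle.
  //
  //   vk::BindImageMemorySwapchainInfoKHR swapchainBind( swapchain, 0 /*imageIndex*/ );
  //   vk::BindImageMemoryInfo bindInfo( image, vk::DeviceMemory(), 0 );
  //   bindInfo.setPNext( &swapchainBind );
  //   device.bindImageMemory2( bindInfo );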
); + + struct AcquireNextImageInfoKHR + { + AcquireNextImageInfoKHR( SwapchainKHR swapchain_ = SwapchainKHR(), + uint64_t timeout_ = 0, + Semaphore semaphore_ = Semaphore(), + Fence fence_ = Fence(), + uint32_t deviceMask_ = 0 ) + : swapchain( swapchain_ ) + , timeout( timeout_ ) + , semaphore( semaphore_ ) + , fence( fence_ ) + , deviceMask( deviceMask_ ) + { + } + + AcquireNextImageInfoKHR( VkAcquireNextImageInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( AcquireNextImageInfoKHR ) ); + } + + AcquireNextImageInfoKHR& operator=( VkAcquireNextImageInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( AcquireNextImageInfoKHR ) ); + return *this; + } + AcquireNextImageInfoKHR& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + AcquireNextImageInfoKHR& setSwapchain( SwapchainKHR swapchain_ ) + { + swapchain = swapchain_; + return *this; + } + + AcquireNextImageInfoKHR& setTimeout( uint64_t timeout_ ) + { + timeout = timeout_; + return *this; + } + + AcquireNextImageInfoKHR& setSemaphore( Semaphore semaphore_ ) + { + semaphore = semaphore_; + return *this; + } + + AcquireNextImageInfoKHR& setFence( Fence fence_ ) + { + fence = fence_; + return *this; + } + + AcquireNextImageInfoKHR& setDeviceMask( uint32_t deviceMask_ ) + { + deviceMask = deviceMask_; + return *this; + } + + operator VkAcquireNextImageInfoKHR const&() const + { + return *reinterpret_cast(this); + } + + operator VkAcquireNextImageInfoKHR &() + { + return *reinterpret_cast(this); + } + + bool operator==( AcquireNextImageInfoKHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( swapchain == rhs.swapchain ) + && ( timeout == rhs.timeout ) + && ( semaphore == rhs.semaphore ) + && ( fence == rhs.fence ) + && ( deviceMask == rhs.deviceMask ); + } + + bool operator!=( AcquireNextImageInfoKHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eAcquireNextImageInfoKHR; + + public: + const void* pNext = nullptr; + SwapchainKHR swapchain; + uint64_t timeout; + Semaphore semaphore; + Fence fence; + uint32_t deviceMask; + }; + static_assert( sizeof( AcquireNextImageInfoKHR ) == sizeof( VkAcquireNextImageInfoKHR ), "struct and wrapper have different size!" 
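  // Usage sketch for AcquireNextImageInfoKHR (illustrative; assumes valid vk::Device
  // device, vk::SwapchainKHR swapchain and vk::Semaphore imageAvailable handles, and
  // the exception-based wrapper API): the device-group aware acquire path.
  //
  //   vk::AcquireNextImageInfoKHR acquireInfo( swapchain, UINT64_MAX,
  //                                            imageAvailable, vk::Fence(), 0x1 );
  //   uint32_t imageIndex = device.acquireNextImage2KHR( acquireInfo ).value;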
); + + struct HdrMetadataEXT + { + HdrMetadataEXT( XYColorEXT displayPrimaryRed_ = XYColorEXT(), + XYColorEXT displayPrimaryGreen_ = XYColorEXT(), + XYColorEXT displayPrimaryBlue_ = XYColorEXT(), + XYColorEXT whitePoint_ = XYColorEXT(), + float maxLuminance_ = 0, + float minLuminance_ = 0, + float maxContentLightLevel_ = 0, + float maxFrameAverageLightLevel_ = 0 ) + : displayPrimaryRed( displayPrimaryRed_ ) + , displayPrimaryGreen( displayPrimaryGreen_ ) + , displayPrimaryBlue( displayPrimaryBlue_ ) + , whitePoint( whitePoint_ ) + , maxLuminance( maxLuminance_ ) + , minLuminance( minLuminance_ ) + , maxContentLightLevel( maxContentLightLevel_ ) + , maxFrameAverageLightLevel( maxFrameAverageLightLevel_ ) + { + } + + HdrMetadataEXT( VkHdrMetadataEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( HdrMetadataEXT ) ); + } + + HdrMetadataEXT& operator=( VkHdrMetadataEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( HdrMetadataEXT ) ); + return *this; + } + HdrMetadataEXT& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + HdrMetadataEXT& setDisplayPrimaryRed( XYColorEXT displayPrimaryRed_ ) + { + displayPrimaryRed = displayPrimaryRed_; + return *this; + } + + HdrMetadataEXT& setDisplayPrimaryGreen( XYColorEXT displayPrimaryGreen_ ) + { + displayPrimaryGreen = displayPrimaryGreen_; + return *this; + } + + HdrMetadataEXT& setDisplayPrimaryBlue( XYColorEXT displayPrimaryBlue_ ) + { + displayPrimaryBlue = displayPrimaryBlue_; + return *this; + } + + HdrMetadataEXT& setWhitePoint( XYColorEXT whitePoint_ ) + { + whitePoint = whitePoint_; + return *this; + } + + HdrMetadataEXT& setMaxLuminance( float maxLuminance_ ) + { + maxLuminance = maxLuminance_; + return *this; + } + + HdrMetadataEXT& setMinLuminance( float minLuminance_ ) + { + minLuminance = minLuminance_; + return *this; + } + + HdrMetadataEXT& setMaxContentLightLevel( float maxContentLightLevel_ ) + { + maxContentLightLevel = maxContentLightLevel_; + return *this; + } + + HdrMetadataEXT& setMaxFrameAverageLightLevel( float maxFrameAverageLightLevel_ ) + { + maxFrameAverageLightLevel = maxFrameAverageLightLevel_; + return *this; + } + + operator VkHdrMetadataEXT const&() const + { + return *reinterpret_cast(this); + } + + operator VkHdrMetadataEXT &() + { + return *reinterpret_cast(this); + } + + bool operator==( HdrMetadataEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( displayPrimaryRed == rhs.displayPrimaryRed ) + && ( displayPrimaryGreen == rhs.displayPrimaryGreen ) + && ( displayPrimaryBlue == rhs.displayPrimaryBlue ) + && ( whitePoint == rhs.whitePoint ) + && ( maxLuminance == rhs.maxLuminance ) + && ( minLuminance == rhs.minLuminance ) + && ( maxContentLightLevel == rhs.maxContentLightLevel ) + && ( maxFrameAverageLightLevel == rhs.maxFrameAverageLightLevel ); + } + + bool operator!=( HdrMetadataEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eHdrMetadataEXT; + + public: + const void* pNext = nullptr; + XYColorEXT displayPrimaryRed; + XYColorEXT displayPrimaryGreen; + XYColorEXT displayPrimaryBlue; + XYColorEXT whitePoint; + float maxLuminance; + float minLuminance; + float maxContentLightLevel; + float maxFrameAverageLightLevel; + }; + static_assert( sizeof( HdrMetadataEXT ) == sizeof( VkHdrMetadataEXT ), "struct and wrapper have different size!" 
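  // Usage sketch for HdrMetadataEXT (illustrative; assumes a valid vk::Device device
  // and vk::SwapchainKHR swapchain, with VK_EXT_hdr_metadata enabled): describe the
  // mastering display so the presentation engine can tone-map correctly.
  //
  //   vk::HdrMetadataEXT metadata;
  //   metadata.setDisplayPrimaryRed( vk::XYColorEXT( 0.708f, 0.292f ) )   // BT.2020 red
  //           .setWhitePoint( vk::XYColorEXT( 0.3127f, 0.3290f ) )        // D65
  //           .setMaxLuminance( 1000.0f )
  //           .setMinLuminance( 0.001f );
  //   device.setHdrMetadataEXT( swapchain, metadata );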
); + + struct PresentTimesInfoGOOGLE + { + PresentTimesInfoGOOGLE( uint32_t swapchainCount_ = 0, + const PresentTimeGOOGLE* pTimes_ = nullptr ) + : swapchainCount( swapchainCount_ ) + , pTimes( pTimes_ ) + { + } + + PresentTimesInfoGOOGLE( VkPresentTimesInfoGOOGLE const & rhs ) + { + memcpy( this, &rhs, sizeof( PresentTimesInfoGOOGLE ) ); + } + + PresentTimesInfoGOOGLE& operator=( VkPresentTimesInfoGOOGLE const & rhs ) + { + memcpy( this, &rhs, sizeof( PresentTimesInfoGOOGLE ) ); + return *this; + } + PresentTimesInfoGOOGLE& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PresentTimesInfoGOOGLE& setSwapchainCount( uint32_t swapchainCount_ ) + { + swapchainCount = swapchainCount_; + return *this; + } + + PresentTimesInfoGOOGLE& setPTimes( const PresentTimeGOOGLE* pTimes_ ) + { + pTimes = pTimes_; + return *this; + } + + operator VkPresentTimesInfoGOOGLE const&() const + { + return *reinterpret_cast(this); + } + + operator VkPresentTimesInfoGOOGLE &() + { + return *reinterpret_cast(this); + } + + bool operator==( PresentTimesInfoGOOGLE const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( swapchainCount == rhs.swapchainCount ) + && ( pTimes == rhs.pTimes ); + } + + bool operator!=( PresentTimesInfoGOOGLE const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePresentTimesInfoGOOGLE; + + public: + const void* pNext = nullptr; + uint32_t swapchainCount; + const PresentTimeGOOGLE* pTimes; + }; + static_assert( sizeof( PresentTimesInfoGOOGLE ) == sizeof( VkPresentTimesInfoGOOGLE ), "struct and wrapper have different size!" ); + +#ifdef VK_USE_PLATFORM_IOS_MVK + struct IOSSurfaceCreateInfoMVK + { + IOSSurfaceCreateInfoMVK( IOSSurfaceCreateFlagsMVK flags_ = IOSSurfaceCreateFlagsMVK(), + const void* pView_ = nullptr ) + : flags( flags_ ) + , pView( pView_ ) + { + } + + IOSSurfaceCreateInfoMVK( VkIOSSurfaceCreateInfoMVK const & rhs ) + { + memcpy( this, &rhs, sizeof( IOSSurfaceCreateInfoMVK ) ); + } + + IOSSurfaceCreateInfoMVK& operator=( VkIOSSurfaceCreateInfoMVK const & rhs ) + { + memcpy( this, &rhs, sizeof( IOSSurfaceCreateInfoMVK ) ); + return *this; + } + IOSSurfaceCreateInfoMVK& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + IOSSurfaceCreateInfoMVK& setFlags( IOSSurfaceCreateFlagsMVK flags_ ) + { + flags = flags_; + return *this; + } + + IOSSurfaceCreateInfoMVK& setPView( const void* pView_ ) + { + pView = pView_; + return *this; + } + + operator VkIOSSurfaceCreateInfoMVK const&() const + { + return *reinterpret_cast(this); + } + + operator VkIOSSurfaceCreateInfoMVK &() + { + return *reinterpret_cast(this); + } + + bool operator==( IOSSurfaceCreateInfoMVK const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( pView == rhs.pView ); + } + + bool operator!=( IOSSurfaceCreateInfoMVK const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eIosSurfaceCreateInfoMVK; + + public: + const void* pNext = nullptr; + IOSSurfaceCreateFlagsMVK flags; + const void* pView; + }; + static_assert( sizeof( IOSSurfaceCreateInfoMVK ) == sizeof( VkIOSSurfaceCreateInfoMVK ), "struct and wrapper have different size!" 
); +#endif /*VK_USE_PLATFORM_IOS_MVK*/ + +#ifdef VK_USE_PLATFORM_MACOS_MVK + struct MacOSSurfaceCreateInfoMVK + { + MacOSSurfaceCreateInfoMVK( MacOSSurfaceCreateFlagsMVK flags_ = MacOSSurfaceCreateFlagsMVK(), + const void* pView_ = nullptr ) + : flags( flags_ ) + , pView( pView_ ) + { + } + + MacOSSurfaceCreateInfoMVK( VkMacOSSurfaceCreateInfoMVK const & rhs ) + { + memcpy( this, &rhs, sizeof( MacOSSurfaceCreateInfoMVK ) ); + } + + MacOSSurfaceCreateInfoMVK& operator=( VkMacOSSurfaceCreateInfoMVK const & rhs ) + { + memcpy( this, &rhs, sizeof( MacOSSurfaceCreateInfoMVK ) ); + return *this; + } + MacOSSurfaceCreateInfoMVK& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + MacOSSurfaceCreateInfoMVK& setFlags( MacOSSurfaceCreateFlagsMVK flags_ ) + { + flags = flags_; + return *this; + } + + MacOSSurfaceCreateInfoMVK& setPView( const void* pView_ ) + { + pView = pView_; + return *this; + } + + operator VkMacOSSurfaceCreateInfoMVK const&() const + { + return *reinterpret_cast(this); + } + + operator VkMacOSSurfaceCreateInfoMVK &() + { + return *reinterpret_cast(this); + } + + bool operator==( MacOSSurfaceCreateInfoMVK const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( pView == rhs.pView ); + } + + bool operator!=( MacOSSurfaceCreateInfoMVK const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eMacosSurfaceCreateInfoMVK; + + public: + const void* pNext = nullptr; + MacOSSurfaceCreateFlagsMVK flags; + const void* pView; + }; + static_assert( sizeof( MacOSSurfaceCreateInfoMVK ) == sizeof( VkMacOSSurfaceCreateInfoMVK ), "struct and wrapper have different size!" ); +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ + + struct PipelineViewportWScalingStateCreateInfoNV + { + PipelineViewportWScalingStateCreateInfoNV( Bool32 viewportWScalingEnable_ = 0, + uint32_t viewportCount_ = 0, + const ViewportWScalingNV* pViewportWScalings_ = nullptr ) + : viewportWScalingEnable( viewportWScalingEnable_ ) + , viewportCount( viewportCount_ ) + , pViewportWScalings( pViewportWScalings_ ) + { + } + + PipelineViewportWScalingStateCreateInfoNV( VkPipelineViewportWScalingStateCreateInfoNV const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineViewportWScalingStateCreateInfoNV ) ); + } + + PipelineViewportWScalingStateCreateInfoNV& operator=( VkPipelineViewportWScalingStateCreateInfoNV const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineViewportWScalingStateCreateInfoNV ) ); + return *this; + } + PipelineViewportWScalingStateCreateInfoNV& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PipelineViewportWScalingStateCreateInfoNV& setViewportWScalingEnable( Bool32 viewportWScalingEnable_ ) + { + viewportWScalingEnable = viewportWScalingEnable_; + return *this; + } + + PipelineViewportWScalingStateCreateInfoNV& setViewportCount( uint32_t viewportCount_ ) + { + viewportCount = viewportCount_; + return *this; + } + + PipelineViewportWScalingStateCreateInfoNV& setPViewportWScalings( const ViewportWScalingNV* pViewportWScalings_ ) + { + pViewportWScalings = pViewportWScalings_; + return *this; + } + + operator VkPipelineViewportWScalingStateCreateInfoNV const&() const + { + return *reinterpret_cast(this); + } + + operator VkPipelineViewportWScalingStateCreateInfoNV &() + { + return *reinterpret_cast(this); + } + + bool operator==( PipelineViewportWScalingStateCreateInfoNV const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( 
viewportWScalingEnable == rhs.viewportWScalingEnable ) + && ( viewportCount == rhs.viewportCount ) + && ( pViewportWScalings == rhs.pViewportWScalings ); + } + + bool operator!=( PipelineViewportWScalingStateCreateInfoNV const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePipelineViewportWScalingStateCreateInfoNV; + + public: + const void* pNext = nullptr; + Bool32 viewportWScalingEnable; + uint32_t viewportCount; + const ViewportWScalingNV* pViewportWScalings; + }; + static_assert( sizeof( PipelineViewportWScalingStateCreateInfoNV ) == sizeof( VkPipelineViewportWScalingStateCreateInfoNV ), "struct and wrapper have different size!" ); + + struct PhysicalDeviceDiscardRectanglePropertiesEXT + { + PhysicalDeviceDiscardRectanglePropertiesEXT( uint32_t maxDiscardRectangles_ = 0 ) + : maxDiscardRectangles( maxDiscardRectangles_ ) + { + } + + PhysicalDeviceDiscardRectanglePropertiesEXT( VkPhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceDiscardRectanglePropertiesEXT ) ); + } + + PhysicalDeviceDiscardRectanglePropertiesEXT& operator=( VkPhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceDiscardRectanglePropertiesEXT ) ); + return *this; + } + PhysicalDeviceDiscardRectanglePropertiesEXT& setPNext( void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceDiscardRectanglePropertiesEXT& setMaxDiscardRectangles( uint32_t maxDiscardRectangles_ ) + { + maxDiscardRectangles = maxDiscardRectangles_; + return *this; + } + + operator VkPhysicalDeviceDiscardRectanglePropertiesEXT const&() const + { + return *reinterpret_cast(this); + } + + operator VkPhysicalDeviceDiscardRectanglePropertiesEXT &() + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDeviceDiscardRectanglePropertiesEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( maxDiscardRectangles == rhs.maxDiscardRectangles ); + } + + bool operator!=( PhysicalDeviceDiscardRectanglePropertiesEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePhysicalDeviceDiscardRectanglePropertiesEXT; + + public: + void* pNext = nullptr; + uint32_t maxDiscardRectangles; + }; + static_assert( sizeof( PhysicalDeviceDiscardRectanglePropertiesEXT ) == sizeof( VkPhysicalDeviceDiscardRectanglePropertiesEXT ), "struct and wrapper have different size!" 
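  // Usage sketch for PhysicalDeviceDiscardRectanglePropertiesEXT (illustrative;
  // assumes a valid vk::PhysicalDevice physicalDevice with VK_EXT_discard_rectangles
  // available): read the limit through the vkGetPhysicalDeviceProperties2 pNext chain.
  //
  //   vk::PhysicalDeviceDiscardRectanglePropertiesEXT discardProps;
  //   vk::PhysicalDeviceProperties2 props2;
  //   props2.pNext = &discardProps;                       // chain the output struct
  //   physicalDevice.getProperties2( &props2 );
  //   uint32_t maxRects = discardProps.maxDiscardRectangles;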
); + + struct PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX + { + operator VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const&() const + { + return *reinterpret_cast(this); + } + + operator VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX &() + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( perViewPositionAllComponents == rhs.perViewPositionAllComponents ); + } + + bool operator!=( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX; + + public: + void* pNext = nullptr; + Bool32 perViewPositionAllComponents; + }; + static_assert( sizeof( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX ) == sizeof( VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX ), "struct and wrapper have different size!" ); + + struct PhysicalDeviceSurfaceInfo2KHR + { + PhysicalDeviceSurfaceInfo2KHR( SurfaceKHR surface_ = SurfaceKHR() ) + : surface( surface_ ) + { + } + + PhysicalDeviceSurfaceInfo2KHR( VkPhysicalDeviceSurfaceInfo2KHR const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceSurfaceInfo2KHR ) ); + } + + PhysicalDeviceSurfaceInfo2KHR& operator=( VkPhysicalDeviceSurfaceInfo2KHR const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceSurfaceInfo2KHR ) ); + return *this; + } + PhysicalDeviceSurfaceInfo2KHR& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceSurfaceInfo2KHR& setSurface( SurfaceKHR surface_ ) + { + surface = surface_; + return *this; + } + + operator VkPhysicalDeviceSurfaceInfo2KHR const&() const + { + return *reinterpret_cast(this); + } + + operator VkPhysicalDeviceSurfaceInfo2KHR &() + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDeviceSurfaceInfo2KHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( surface == rhs.surface ); + } + + bool operator!=( PhysicalDeviceSurfaceInfo2KHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePhysicalDeviceSurfaceInfo2KHR; + + public: + const void* pNext = nullptr; + SurfaceKHR surface; + }; + static_assert( sizeof( PhysicalDeviceSurfaceInfo2KHR ) == sizeof( VkPhysicalDeviceSurfaceInfo2KHR ), "struct and wrapper have different size!" ); + + struct DisplayPlaneProperties2KHR + { + operator VkDisplayPlaneProperties2KHR const&() const + { + return *reinterpret_cast(this); + } + + operator VkDisplayPlaneProperties2KHR &() + { + return *reinterpret_cast(this); + } + + bool operator==( DisplayPlaneProperties2KHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( displayPlaneProperties == rhs.displayPlaneProperties ); + } + + bool operator!=( DisplayPlaneProperties2KHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eDisplayPlaneProperties2KHR; + + public: + void* pNext = nullptr; + DisplayPlanePropertiesKHR displayPlaneProperties; + }; + static_assert( sizeof( DisplayPlaneProperties2KHR ) == sizeof( VkDisplayPlaneProperties2KHR ), "struct and wrapper have different size!" 
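  // Usage sketch for PhysicalDeviceSurfaceInfo2KHR (illustrative; assumes a valid
  // vk::PhysicalDevice physicalDevice and vk::SurfaceKHR surface, with
  // VK_KHR_get_surface_capabilities2 enabled):
  //
  //   vk::PhysicalDeviceSurfaceInfo2KHR surfaceInfo( surface );
  //   vk::SurfaceCapabilities2KHR caps =
  //       physicalDevice.getSurfaceCapabilities2KHR( surfaceInfo );
  //   uint32_t minImages = caps.surfaceCapabilities.minImageCount;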
); + + struct DisplayModeProperties2KHR + { + operator VkDisplayModeProperties2KHR const&() const + { + return *reinterpret_cast(this); + } + + operator VkDisplayModeProperties2KHR &() + { + return *reinterpret_cast(this); + } + + bool operator==( DisplayModeProperties2KHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( displayModeProperties == rhs.displayModeProperties ); + } + + bool operator!=( DisplayModeProperties2KHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eDisplayModeProperties2KHR; + + public: + void* pNext = nullptr; + DisplayModePropertiesKHR displayModeProperties; + }; + static_assert( sizeof( DisplayModeProperties2KHR ) == sizeof( VkDisplayModeProperties2KHR ), "struct and wrapper have different size!" ); + + struct DisplayPlaneInfo2KHR + { + DisplayPlaneInfo2KHR( DisplayModeKHR mode_ = DisplayModeKHR(), + uint32_t planeIndex_ = 0 ) + : mode( mode_ ) + , planeIndex( planeIndex_ ) + { + } + + DisplayPlaneInfo2KHR( VkDisplayPlaneInfo2KHR const & rhs ) + { + memcpy( this, &rhs, sizeof( DisplayPlaneInfo2KHR ) ); + } + + DisplayPlaneInfo2KHR& operator=( VkDisplayPlaneInfo2KHR const & rhs ) + { + memcpy( this, &rhs, sizeof( DisplayPlaneInfo2KHR ) ); + return *this; + } + DisplayPlaneInfo2KHR& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + DisplayPlaneInfo2KHR& setMode( DisplayModeKHR mode_ ) + { + mode = mode_; + return *this; + } + + DisplayPlaneInfo2KHR& setPlaneIndex( uint32_t planeIndex_ ) + { + planeIndex = planeIndex_; + return *this; + } + + operator VkDisplayPlaneInfo2KHR const&() const + { + return *reinterpret_cast(this); + } + + operator VkDisplayPlaneInfo2KHR &() + { + return *reinterpret_cast(this); + } + + bool operator==( DisplayPlaneInfo2KHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( mode == rhs.mode ) + && ( planeIndex == rhs.planeIndex ); + } + + bool operator!=( DisplayPlaneInfo2KHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eDisplayPlaneInfo2KHR; + + public: + const void* pNext = nullptr; + DisplayModeKHR mode; + uint32_t planeIndex; + }; + static_assert( sizeof( DisplayPlaneInfo2KHR ) == sizeof( VkDisplayPlaneInfo2KHR ), "struct and wrapper have different size!" 
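  // Usage sketch for DisplayPlaneInfo2KHR (illustrative; assumes a valid
  // vk::PhysicalDevice physicalDevice and vk::DisplayModeKHR displayMode, with
  // VK_KHR_get_display_properties2 enabled):
  //
  //   vk::DisplayPlaneInfo2KHR planeInfo( displayMode, 0 /*planeIndex*/ );
  //   vk::DisplayPlaneCapabilities2KHR planeCaps =
  //       physicalDevice.getDisplayPlaneCapabilities2KHR( planeInfo );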
); + + struct PhysicalDevice16BitStorageFeatures + { + PhysicalDevice16BitStorageFeatures( Bool32 storageBuffer16BitAccess_ = 0, + Bool32 uniformAndStorageBuffer16BitAccess_ = 0, + Bool32 storagePushConstant16_ = 0, + Bool32 storageInputOutput16_ = 0 ) + : storageBuffer16BitAccess( storageBuffer16BitAccess_ ) + , uniformAndStorageBuffer16BitAccess( uniformAndStorageBuffer16BitAccess_ ) + , storagePushConstant16( storagePushConstant16_ ) + , storageInputOutput16( storageInputOutput16_ ) + { + } + + PhysicalDevice16BitStorageFeatures( VkPhysicalDevice16BitStorageFeatures const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDevice16BitStorageFeatures ) ); + } + + PhysicalDevice16BitStorageFeatures& operator=( VkPhysicalDevice16BitStorageFeatures const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDevice16BitStorageFeatures ) ); + return *this; + } + PhysicalDevice16BitStorageFeatures& setPNext( void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PhysicalDevice16BitStorageFeatures& setStorageBuffer16BitAccess( Bool32 storageBuffer16BitAccess_ ) + { + storageBuffer16BitAccess = storageBuffer16BitAccess_; + return *this; + } + + PhysicalDevice16BitStorageFeatures& setUniformAndStorageBuffer16BitAccess( Bool32 uniformAndStorageBuffer16BitAccess_ ) + { + uniformAndStorageBuffer16BitAccess = uniformAndStorageBuffer16BitAccess_; + return *this; + } + + PhysicalDevice16BitStorageFeatures& setStoragePushConstant16( Bool32 storagePushConstant16_ ) + { + storagePushConstant16 = storagePushConstant16_; + return *this; + } + + PhysicalDevice16BitStorageFeatures& setStorageInputOutput16( Bool32 storageInputOutput16_ ) + { + storageInputOutput16 = storageInputOutput16_; + return *this; + } + + operator VkPhysicalDevice16BitStorageFeatures const&() const + { + return *reinterpret_cast(this); + } + + operator VkPhysicalDevice16BitStorageFeatures &() + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDevice16BitStorageFeatures const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( storageBuffer16BitAccess == rhs.storageBuffer16BitAccess ) + && ( uniformAndStorageBuffer16BitAccess == rhs.uniformAndStorageBuffer16BitAccess ) + && ( storagePushConstant16 == rhs.storagePushConstant16 ) + && ( storageInputOutput16 == rhs.storageInputOutput16 ); + } + + bool operator!=( PhysicalDevice16BitStorageFeatures const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePhysicalDevice16BitStorageFeatures; + + public: + void* pNext = nullptr; + Bool32 storageBuffer16BitAccess; + Bool32 uniformAndStorageBuffer16BitAccess; + Bool32 storagePushConstant16; + Bool32 storageInputOutput16; + }; + static_assert( sizeof( PhysicalDevice16BitStorageFeatures ) == sizeof( VkPhysicalDevice16BitStorageFeatures ), "struct and wrapper have different size!" 
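  // Usage sketch for PhysicalDevice16BitStorageFeatures (illustrative; assumes a
  // filled vk::DeviceCreateInfo deviceInfo and a valid vk::PhysicalDevice
  // physicalDevice): features queried through getFeatures2 are enabled at device
  // creation through the same pNext chain.
  //
  //   vk::PhysicalDevice16BitStorageFeatures storage16;
  //   storage16.setStorageBuffer16BitAccess( VK_TRUE );
  //   deviceInfo.setPNext( &storage16 );
  //   vk::Device device = physicalDevice.createDevice( deviceInfo );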
); + + using PhysicalDevice16BitStorageFeaturesKHR = PhysicalDevice16BitStorageFeatures; + + struct BufferMemoryRequirementsInfo2 + { + BufferMemoryRequirementsInfo2( Buffer buffer_ = Buffer() ) + : buffer( buffer_ ) + { + } + + BufferMemoryRequirementsInfo2( VkBufferMemoryRequirementsInfo2 const & rhs ) + { + memcpy( this, &rhs, sizeof( BufferMemoryRequirementsInfo2 ) ); + } + + BufferMemoryRequirementsInfo2& operator=( VkBufferMemoryRequirementsInfo2 const & rhs ) + { + memcpy( this, &rhs, sizeof( BufferMemoryRequirementsInfo2 ) ); + return *this; + } + BufferMemoryRequirementsInfo2& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + BufferMemoryRequirementsInfo2& setBuffer( Buffer buffer_ ) + { + buffer = buffer_; + return *this; + } + + operator VkBufferMemoryRequirementsInfo2 const&() const + { + return *reinterpret_cast(this); + } + + operator VkBufferMemoryRequirementsInfo2 &() + { + return *reinterpret_cast(this); + } + + bool operator==( BufferMemoryRequirementsInfo2 const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( buffer == rhs.buffer ); + } + + bool operator!=( BufferMemoryRequirementsInfo2 const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eBufferMemoryRequirementsInfo2; + + public: + const void* pNext = nullptr; + Buffer buffer; + }; + static_assert( sizeof( BufferMemoryRequirementsInfo2 ) == sizeof( VkBufferMemoryRequirementsInfo2 ), "struct and wrapper have different size!" ); + + using BufferMemoryRequirementsInfo2KHR = BufferMemoryRequirementsInfo2; + + struct ImageMemoryRequirementsInfo2 + { + ImageMemoryRequirementsInfo2( Image image_ = Image() ) + : image( image_ ) + { + } + + ImageMemoryRequirementsInfo2( VkImageMemoryRequirementsInfo2 const & rhs ) + { + memcpy( this, &rhs, sizeof( ImageMemoryRequirementsInfo2 ) ); + } + + ImageMemoryRequirementsInfo2& operator=( VkImageMemoryRequirementsInfo2 const & rhs ) + { + memcpy( this, &rhs, sizeof( ImageMemoryRequirementsInfo2 ) ); + return *this; + } + ImageMemoryRequirementsInfo2& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + ImageMemoryRequirementsInfo2& setImage( Image image_ ) + { + image = image_; + return *this; + } + + operator VkImageMemoryRequirementsInfo2 const&() const + { + return *reinterpret_cast(this); + } + + operator VkImageMemoryRequirementsInfo2 &() + { + return *reinterpret_cast(this); + } + + bool operator==( ImageMemoryRequirementsInfo2 const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( image == rhs.image ); + } + + bool operator!=( ImageMemoryRequirementsInfo2 const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eImageMemoryRequirementsInfo2; + + public: + const void* pNext = nullptr; + Image image; + }; + static_assert( sizeof( ImageMemoryRequirementsInfo2 ) == sizeof( VkImageMemoryRequirementsInfo2 ), "struct and wrapper have different size!" 
); + + using ImageMemoryRequirementsInfo2KHR = ImageMemoryRequirementsInfo2; + + struct ImageSparseMemoryRequirementsInfo2 + { + ImageSparseMemoryRequirementsInfo2( Image image_ = Image() ) + : image( image_ ) + { + } + + ImageSparseMemoryRequirementsInfo2( VkImageSparseMemoryRequirementsInfo2 const & rhs ) + { + memcpy( this, &rhs, sizeof( ImageSparseMemoryRequirementsInfo2 ) ); + } + + ImageSparseMemoryRequirementsInfo2& operator=( VkImageSparseMemoryRequirementsInfo2 const & rhs ) + { + memcpy( this, &rhs, sizeof( ImageSparseMemoryRequirementsInfo2 ) ); + return *this; + } + ImageSparseMemoryRequirementsInfo2& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + ImageSparseMemoryRequirementsInfo2& setImage( Image image_ ) + { + image = image_; + return *this; + } + + operator VkImageSparseMemoryRequirementsInfo2 const&() const + { + return *reinterpret_cast(this); + } + + operator VkImageSparseMemoryRequirementsInfo2 &() + { + return *reinterpret_cast(this); + } + + bool operator==( ImageSparseMemoryRequirementsInfo2 const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( image == rhs.image ); + } + + bool operator!=( ImageSparseMemoryRequirementsInfo2 const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eImageSparseMemoryRequirementsInfo2; + + public: + const void* pNext = nullptr; + Image image; + }; + static_assert( sizeof( ImageSparseMemoryRequirementsInfo2 ) == sizeof( VkImageSparseMemoryRequirementsInfo2 ), "struct and wrapper have different size!" ); + + using ImageSparseMemoryRequirementsInfo2KHR = ImageSparseMemoryRequirementsInfo2; + + struct MemoryRequirements2 + { + operator VkMemoryRequirements2 const&() const + { + return *reinterpret_cast(this); + } + + operator VkMemoryRequirements2 &() + { + return *reinterpret_cast(this); + } + + bool operator==( MemoryRequirements2 const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( memoryRequirements == rhs.memoryRequirements ); + } + + bool operator!=( MemoryRequirements2 const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eMemoryRequirements2; + + public: + void* pNext = nullptr; + MemoryRequirements memoryRequirements; + }; + static_assert( sizeof( MemoryRequirements2 ) == sizeof( VkMemoryRequirements2 ), "struct and wrapper have different size!" ); + + using MemoryRequirements2KHR = MemoryRequirements2; + + struct MemoryDedicatedRequirements + { + operator VkMemoryDedicatedRequirements const&() const + { + return *reinterpret_cast(this); + } + + operator VkMemoryDedicatedRequirements &() + { + return *reinterpret_cast(this); + } + + bool operator==( MemoryDedicatedRequirements const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( prefersDedicatedAllocation == rhs.prefersDedicatedAllocation ) + && ( requiresDedicatedAllocation == rhs.requiresDedicatedAllocation ); + } + + bool operator!=( MemoryDedicatedRequirements const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eMemoryDedicatedRequirements; + + public: + void* pNext = nullptr; + Bool32 prefersDedicatedAllocation; + Bool32 requiresDedicatedAllocation; + }; + static_assert( sizeof( MemoryDedicatedRequirements ) == sizeof( VkMemoryDedicatedRequirements ), "struct and wrapper have different size!" 
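  // Usage sketch for ImageMemoryRequirementsInfo2 / MemoryDedicatedRequirements
  // (illustrative; assumes a valid vk::Device device and vk::Image image): query
  // base and dedicated-allocation requirements in one call by chaining the output
  // structs.
  //
  //   vk::MemoryDedicatedRequirements dedicatedReqs;
  //   vk::MemoryRequirements2 memReqs2;
  //   memReqs2.pNext = &dedicatedReqs;
  //   vk::ImageMemoryRequirementsInfo2 info( image );
  //   device.getImageMemoryRequirements2( &info, &memReqs2 );
  //   bool wantsDedicated = ( dedicatedReqs.prefersDedicatedAllocation == VK_TRUE );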
); + + using MemoryDedicatedRequirementsKHR = MemoryDedicatedRequirements; + + struct MemoryDedicatedAllocateInfo + { + MemoryDedicatedAllocateInfo( Image image_ = Image(), + Buffer buffer_ = Buffer() ) + : image( image_ ) + , buffer( buffer_ ) + { + } + + MemoryDedicatedAllocateInfo( VkMemoryDedicatedAllocateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( MemoryDedicatedAllocateInfo ) ); + } + + MemoryDedicatedAllocateInfo& operator=( VkMemoryDedicatedAllocateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( MemoryDedicatedAllocateInfo ) ); + return *this; + } + MemoryDedicatedAllocateInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + MemoryDedicatedAllocateInfo& setImage( Image image_ ) + { + image = image_; + return *this; + } + + MemoryDedicatedAllocateInfo& setBuffer( Buffer buffer_ ) + { + buffer = buffer_; + return *this; + } + + operator VkMemoryDedicatedAllocateInfo const&() const + { + return *reinterpret_cast(this); + } + + operator VkMemoryDedicatedAllocateInfo &() + { + return *reinterpret_cast(this); + } + + bool operator==( MemoryDedicatedAllocateInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( image == rhs.image ) + && ( buffer == rhs.buffer ); + } + + bool operator!=( MemoryDedicatedAllocateInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eMemoryDedicatedAllocateInfo; + + public: + const void* pNext = nullptr; + Image image; + Buffer buffer; + }; + static_assert( sizeof( MemoryDedicatedAllocateInfo ) == sizeof( VkMemoryDedicatedAllocateInfo ), "struct and wrapper have different size!" ); + + using MemoryDedicatedAllocateInfoKHR = MemoryDedicatedAllocateInfo; + + struct SamplerYcbcrConversionInfo + { + SamplerYcbcrConversionInfo( SamplerYcbcrConversion conversion_ = SamplerYcbcrConversion() ) + : conversion( conversion_ ) + { + } + + SamplerYcbcrConversionInfo( VkSamplerYcbcrConversionInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( SamplerYcbcrConversionInfo ) ); + } + + SamplerYcbcrConversionInfo& operator=( VkSamplerYcbcrConversionInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( SamplerYcbcrConversionInfo ) ); + return *this; + } + SamplerYcbcrConversionInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + SamplerYcbcrConversionInfo& setConversion( SamplerYcbcrConversion conversion_ ) + { + conversion = conversion_; + return *this; + } + + operator VkSamplerYcbcrConversionInfo const&() const + { + return *reinterpret_cast(this); + } + + operator VkSamplerYcbcrConversionInfo &() + { + return *reinterpret_cast(this); + } + + bool operator==( SamplerYcbcrConversionInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( conversion == rhs.conversion ); + } + + bool operator!=( SamplerYcbcrConversionInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eSamplerYcbcrConversionInfo; + + public: + const void* pNext = nullptr; + SamplerYcbcrConversion conversion; + }; + static_assert( sizeof( SamplerYcbcrConversionInfo ) == sizeof( VkSamplerYcbcrConversionInfo ), "struct and wrapper have different size!" 
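  // Usage sketch for MemoryDedicatedAllocateInfo (illustrative; assumes a valid
  // vk::Device device, vk::Image image, and a filled vk::MemoryAllocateInfo allocInfo
  // whose size and memory type already match the image's requirements): request a
  // dedicated allocation for the image.
  //
  //   vk::MemoryDedicatedAllocateInfo dedicatedInfo;
  //   dedicatedInfo.setImage( image );
  //   allocInfo.setPNext( &dedicatedInfo );
  //   vk::DeviceMemory memory = device.allocateMemory( allocInfo );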
); + + using SamplerYcbcrConversionInfoKHR = SamplerYcbcrConversionInfo; + + struct PhysicalDeviceSamplerYcbcrConversionFeatures + { + PhysicalDeviceSamplerYcbcrConversionFeatures( Bool32 samplerYcbcrConversion_ = 0 ) + : samplerYcbcrConversion( samplerYcbcrConversion_ ) + { + } + + PhysicalDeviceSamplerYcbcrConversionFeatures( VkPhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceSamplerYcbcrConversionFeatures ) ); + } + + PhysicalDeviceSamplerYcbcrConversionFeatures& operator=( VkPhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceSamplerYcbcrConversionFeatures ) ); + return *this; + } + PhysicalDeviceSamplerYcbcrConversionFeatures& setPNext( void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceSamplerYcbcrConversionFeatures& setSamplerYcbcrConversion( Bool32 samplerYcbcrConversion_ ) + { + samplerYcbcrConversion = samplerYcbcrConversion_; + return *this; + } + + operator VkPhysicalDeviceSamplerYcbcrConversionFeatures const&() const + { + return *reinterpret_cast(this); + } + + operator VkPhysicalDeviceSamplerYcbcrConversionFeatures &() + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDeviceSamplerYcbcrConversionFeatures const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( samplerYcbcrConversion == rhs.samplerYcbcrConversion ); + } + + bool operator!=( PhysicalDeviceSamplerYcbcrConversionFeatures const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePhysicalDeviceSamplerYcbcrConversionFeatures; + + public: + void* pNext = nullptr; + Bool32 samplerYcbcrConversion; + }; + static_assert( sizeof( PhysicalDeviceSamplerYcbcrConversionFeatures ) == sizeof( VkPhysicalDeviceSamplerYcbcrConversionFeatures ), "struct and wrapper have different size!" ); + + using PhysicalDeviceSamplerYcbcrConversionFeaturesKHR = PhysicalDeviceSamplerYcbcrConversionFeatures; + + struct SamplerYcbcrConversionImageFormatProperties + { + operator VkSamplerYcbcrConversionImageFormatProperties const&() const + { + return *reinterpret_cast(this); + } + + operator VkSamplerYcbcrConversionImageFormatProperties &() + { + return *reinterpret_cast(this); + } + + bool operator==( SamplerYcbcrConversionImageFormatProperties const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( combinedImageSamplerDescriptorCount == rhs.combinedImageSamplerDescriptorCount ); + } + + bool operator!=( SamplerYcbcrConversionImageFormatProperties const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eSamplerYcbcrConversionImageFormatProperties; + + public: + void* pNext = nullptr; + uint32_t combinedImageSamplerDescriptorCount; + }; + static_assert( sizeof( SamplerYcbcrConversionImageFormatProperties ) == sizeof( VkSamplerYcbcrConversionImageFormatProperties ), "struct and wrapper have different size!" 
); + + using SamplerYcbcrConversionImageFormatPropertiesKHR = SamplerYcbcrConversionImageFormatProperties; + + struct TextureLODGatherFormatPropertiesAMD + { + operator VkTextureLODGatherFormatPropertiesAMD const&() const + { + return *reinterpret_cast(this); + } + + operator VkTextureLODGatherFormatPropertiesAMD &() + { + return *reinterpret_cast(this); + } + + bool operator==( TextureLODGatherFormatPropertiesAMD const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( supportsTextureGatherLODBiasAMD == rhs.supportsTextureGatherLODBiasAMD ); + } + + bool operator!=( TextureLODGatherFormatPropertiesAMD const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eTextureLodGatherFormatPropertiesAMD; + + public: + void* pNext = nullptr; + Bool32 supportsTextureGatherLODBiasAMD; + }; + static_assert( sizeof( TextureLODGatherFormatPropertiesAMD ) == sizeof( VkTextureLODGatherFormatPropertiesAMD ), "struct and wrapper have different size!" ); + + struct ProtectedSubmitInfo + { + ProtectedSubmitInfo( Bool32 protectedSubmit_ = 0 ) + : protectedSubmit( protectedSubmit_ ) + { + } + + ProtectedSubmitInfo( VkProtectedSubmitInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( ProtectedSubmitInfo ) ); + } + + ProtectedSubmitInfo& operator=( VkProtectedSubmitInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( ProtectedSubmitInfo ) ); + return *this; + } + ProtectedSubmitInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + ProtectedSubmitInfo& setProtectedSubmit( Bool32 protectedSubmit_ ) + { + protectedSubmit = protectedSubmit_; + return *this; + } + + operator VkProtectedSubmitInfo const&() const + { + return *reinterpret_cast(this); + } + + operator VkProtectedSubmitInfo &() + { + return *reinterpret_cast(this); + } + + bool operator==( ProtectedSubmitInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( protectedSubmit == rhs.protectedSubmit ); + } + + bool operator!=( ProtectedSubmitInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eProtectedSubmitInfo; + + public: + const void* pNext = nullptr; + Bool32 protectedSubmit; + }; + static_assert( sizeof( ProtectedSubmitInfo ) == sizeof( VkProtectedSubmitInfo ), "struct and wrapper have different size!" 
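+
+  // A minimal usage sketch (illustrative only, not generated code) for ProtectedSubmitInfo, defined
+  // above: it marks a submission as protected by chaining into SubmitInfo::pNext. 'queue', 'cmd' and
+  // 'fence' are assumed to exist and the protectedMemory feature to be enabled.
+  //
+  //   vk::ProtectedSubmitInfo protectedInfo( VK_TRUE );
+  //   vk::SubmitInfo submit;
+  //   submit.setCommandBufferCount( 1 ).setPCommandBuffers( &cmd ).setPNext( &protectedInfo );
+  //   queue.submit( submit, fence );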
); + + struct PhysicalDeviceProtectedMemoryFeatures + { + PhysicalDeviceProtectedMemoryFeatures( Bool32 protectedMemory_ = 0 ) + : protectedMemory( protectedMemory_ ) + { + } + + PhysicalDeviceProtectedMemoryFeatures( VkPhysicalDeviceProtectedMemoryFeatures const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceProtectedMemoryFeatures ) ); + } + + PhysicalDeviceProtectedMemoryFeatures& operator=( VkPhysicalDeviceProtectedMemoryFeatures const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceProtectedMemoryFeatures ) ); + return *this; + } + PhysicalDeviceProtectedMemoryFeatures& setPNext( void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceProtectedMemoryFeatures& setProtectedMemory( Bool32 protectedMemory_ ) + { + protectedMemory = protectedMemory_; + return *this; + } + + operator VkPhysicalDeviceProtectedMemoryFeatures const&() const + { + return *reinterpret_cast(this); + } + + operator VkPhysicalDeviceProtectedMemoryFeatures &() + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDeviceProtectedMemoryFeatures const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( protectedMemory == rhs.protectedMemory ); + } + + bool operator!=( PhysicalDeviceProtectedMemoryFeatures const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePhysicalDeviceProtectedMemoryFeatures; + + public: + void* pNext = nullptr; + Bool32 protectedMemory; + }; + static_assert( sizeof( PhysicalDeviceProtectedMemoryFeatures ) == sizeof( VkPhysicalDeviceProtectedMemoryFeatures ), "struct and wrapper have different size!" ); + + struct PhysicalDeviceProtectedMemoryProperties + { + PhysicalDeviceProtectedMemoryProperties( Bool32 protectedNoFault_ = 0 ) + : protectedNoFault( protectedNoFault_ ) + { + } + + PhysicalDeviceProtectedMemoryProperties( VkPhysicalDeviceProtectedMemoryProperties const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceProtectedMemoryProperties ) ); + } + + PhysicalDeviceProtectedMemoryProperties& operator=( VkPhysicalDeviceProtectedMemoryProperties const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceProtectedMemoryProperties ) ); + return *this; + } + PhysicalDeviceProtectedMemoryProperties& setPNext( void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceProtectedMemoryProperties& setProtectedNoFault( Bool32 protectedNoFault_ ) + { + protectedNoFault = protectedNoFault_; + return *this; + } + + operator VkPhysicalDeviceProtectedMemoryProperties const&() const + { + return *reinterpret_cast(this); + } + + operator VkPhysicalDeviceProtectedMemoryProperties &() + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDeviceProtectedMemoryProperties const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( protectedNoFault == rhs.protectedNoFault ); + } + + bool operator!=( PhysicalDeviceProtectedMemoryProperties const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePhysicalDeviceProtectedMemoryProperties; + + public: + void* pNext = nullptr; + Bool32 protectedNoFault; + }; + static_assert( sizeof( PhysicalDeviceProtectedMemoryProperties ) == sizeof( VkPhysicalDeviceProtectedMemoryProperties ), "struct and wrapper have different size!" 
); + + struct PipelineCoverageToColorStateCreateInfoNV + { + PipelineCoverageToColorStateCreateInfoNV( PipelineCoverageToColorStateCreateFlagsNV flags_ = PipelineCoverageToColorStateCreateFlagsNV(), + Bool32 coverageToColorEnable_ = 0, + uint32_t coverageToColorLocation_ = 0 ) + : flags( flags_ ) + , coverageToColorEnable( coverageToColorEnable_ ) + , coverageToColorLocation( coverageToColorLocation_ ) + { + } + + PipelineCoverageToColorStateCreateInfoNV( VkPipelineCoverageToColorStateCreateInfoNV const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineCoverageToColorStateCreateInfoNV ) ); + } + + PipelineCoverageToColorStateCreateInfoNV& operator=( VkPipelineCoverageToColorStateCreateInfoNV const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineCoverageToColorStateCreateInfoNV ) ); + return *this; + } + PipelineCoverageToColorStateCreateInfoNV& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PipelineCoverageToColorStateCreateInfoNV& setFlags( PipelineCoverageToColorStateCreateFlagsNV flags_ ) + { + flags = flags_; + return *this; + } + + PipelineCoverageToColorStateCreateInfoNV& setCoverageToColorEnable( Bool32 coverageToColorEnable_ ) + { + coverageToColorEnable = coverageToColorEnable_; + return *this; + } + + PipelineCoverageToColorStateCreateInfoNV& setCoverageToColorLocation( uint32_t coverageToColorLocation_ ) + { + coverageToColorLocation = coverageToColorLocation_; + return *this; + } + + operator VkPipelineCoverageToColorStateCreateInfoNV const&() const + { + return *reinterpret_cast(this); + } + + operator VkPipelineCoverageToColorStateCreateInfoNV &() + { + return *reinterpret_cast(this); + } + + bool operator==( PipelineCoverageToColorStateCreateInfoNV const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( coverageToColorEnable == rhs.coverageToColorEnable ) + && ( coverageToColorLocation == rhs.coverageToColorLocation ); + } + + bool operator!=( PipelineCoverageToColorStateCreateInfoNV const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePipelineCoverageToColorStateCreateInfoNV; + + public: + const void* pNext = nullptr; + PipelineCoverageToColorStateCreateFlagsNV flags; + Bool32 coverageToColorEnable; + uint32_t coverageToColorLocation; + }; + static_assert( sizeof( PipelineCoverageToColorStateCreateInfoNV ) == sizeof( VkPipelineCoverageToColorStateCreateInfoNV ), "struct and wrapper have different size!" 
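+
+  // A minimal usage sketch (illustrative only, not generated code) for
+  // PipelineCoverageToColorStateCreateInfoNV, defined above: the struct is chained into
+  // PipelineMultisampleStateCreateInfo::pNext when building a graphics pipeline.
+  //
+  //   vk::PipelineCoverageToColorStateCreateInfoNV coverageToColor;
+  //   coverageToColor.setCoverageToColorEnable( VK_TRUE )
+  //                  .setCoverageToColorLocation( 1 );
+  //   vk::PipelineMultisampleStateCreateInfo multisample;
+  //   multisample.setPNext( &coverageToColor );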
); + + struct PhysicalDeviceSamplerFilterMinmaxPropertiesEXT + { + operator VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT const&() const + { + return *reinterpret_cast(this); + } + + operator VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT &() + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDeviceSamplerFilterMinmaxPropertiesEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( filterMinmaxSingleComponentFormats == rhs.filterMinmaxSingleComponentFormats ) + && ( filterMinmaxImageComponentMapping == rhs.filterMinmaxImageComponentMapping ); + } + + bool operator!=( PhysicalDeviceSamplerFilterMinmaxPropertiesEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePhysicalDeviceSamplerFilterMinmaxPropertiesEXT; + + public: + void* pNext = nullptr; + Bool32 filterMinmaxSingleComponentFormats; + Bool32 filterMinmaxImageComponentMapping; + }; + static_assert( sizeof( PhysicalDeviceSamplerFilterMinmaxPropertiesEXT ) == sizeof( VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT ), "struct and wrapper have different size!" ); + + struct MultisamplePropertiesEXT + { + operator VkMultisamplePropertiesEXT const&() const + { + return *reinterpret_cast(this); + } + + operator VkMultisamplePropertiesEXT &() + { + return *reinterpret_cast(this); + } + + bool operator==( MultisamplePropertiesEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( maxSampleLocationGridSize == rhs.maxSampleLocationGridSize ); + } + + bool operator!=( MultisamplePropertiesEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eMultisamplePropertiesEXT; + + public: + void* pNext = nullptr; + Extent2D maxSampleLocationGridSize; + }; + static_assert( sizeof( MultisamplePropertiesEXT ) == sizeof( VkMultisamplePropertiesEXT ), "struct and wrapper have different size!" 
); + + struct PhysicalDeviceBlendOperationAdvancedFeaturesEXT + { + PhysicalDeviceBlendOperationAdvancedFeaturesEXT( Bool32 advancedBlendCoherentOperations_ = 0 ) + : advancedBlendCoherentOperations( advancedBlendCoherentOperations_ ) + { + } + + PhysicalDeviceBlendOperationAdvancedFeaturesEXT( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceBlendOperationAdvancedFeaturesEXT ) ); + } + + PhysicalDeviceBlendOperationAdvancedFeaturesEXT& operator=( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceBlendOperationAdvancedFeaturesEXT ) ); + return *this; + } + PhysicalDeviceBlendOperationAdvancedFeaturesEXT& setPNext( void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceBlendOperationAdvancedFeaturesEXT& setAdvancedBlendCoherentOperations( Bool32 advancedBlendCoherentOperations_ ) + { + advancedBlendCoherentOperations = advancedBlendCoherentOperations_; + return *this; + } + + operator VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const&() const + { + return *reinterpret_cast(this); + } + + operator VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT &() + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( advancedBlendCoherentOperations == rhs.advancedBlendCoherentOperations ); + } + + bool operator!=( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePhysicalDeviceBlendOperationAdvancedFeaturesEXT; + + public: + void* pNext = nullptr; + Bool32 advancedBlendCoherentOperations; + }; + static_assert( sizeof( PhysicalDeviceBlendOperationAdvancedFeaturesEXT ) == sizeof( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT ), "struct and wrapper have different size!" 
); + + struct PhysicalDeviceBlendOperationAdvancedPropertiesEXT + { + operator VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const&() const + { + return *reinterpret_cast(this); + } + + operator VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT &() + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( advancedBlendMaxColorAttachments == rhs.advancedBlendMaxColorAttachments ) + && ( advancedBlendIndependentBlend == rhs.advancedBlendIndependentBlend ) + && ( advancedBlendNonPremultipliedSrcColor == rhs.advancedBlendNonPremultipliedSrcColor ) + && ( advancedBlendNonPremultipliedDstColor == rhs.advancedBlendNonPremultipliedDstColor ) + && ( advancedBlendCorrelatedOverlap == rhs.advancedBlendCorrelatedOverlap ) + && ( advancedBlendAllOperations == rhs.advancedBlendAllOperations ); + } + + bool operator!=( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePhysicalDeviceBlendOperationAdvancedPropertiesEXT; + + public: + void* pNext = nullptr; + uint32_t advancedBlendMaxColorAttachments; + Bool32 advancedBlendIndependentBlend; + Bool32 advancedBlendNonPremultipliedSrcColor; + Bool32 advancedBlendNonPremultipliedDstColor; + Bool32 advancedBlendCorrelatedOverlap; + Bool32 advancedBlendAllOperations; + }; + static_assert( sizeof( PhysicalDeviceBlendOperationAdvancedPropertiesEXT ) == sizeof( VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT ), "struct and wrapper have different size!" ); + + struct PhysicalDeviceInlineUniformBlockFeaturesEXT + { + operator VkPhysicalDeviceInlineUniformBlockFeaturesEXT const&() const + { + return *reinterpret_cast(this); + } + + operator VkPhysicalDeviceInlineUniformBlockFeaturesEXT &() + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDeviceInlineUniformBlockFeaturesEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( inlineUniformBlock == rhs.inlineUniformBlock ) + && ( descriptorBindingInlineUniformBlockUpdateAfterBind == rhs.descriptorBindingInlineUniformBlockUpdateAfterBind ); + } + + bool operator!=( PhysicalDeviceInlineUniformBlockFeaturesEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePhysicalDeviceInlineUniformBlockFeaturesEXT; + + public: + void* pNext = nullptr; + Bool32 inlineUniformBlock; + Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind; + }; + static_assert( sizeof( PhysicalDeviceInlineUniformBlockFeaturesEXT ) == sizeof( VkPhysicalDeviceInlineUniformBlockFeaturesEXT ), "struct and wrapper have different size!" 
); + + struct PhysicalDeviceInlineUniformBlockPropertiesEXT + { + operator VkPhysicalDeviceInlineUniformBlockPropertiesEXT const&() const + { + return *reinterpret_cast(this); + } + + operator VkPhysicalDeviceInlineUniformBlockPropertiesEXT &() + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDeviceInlineUniformBlockPropertiesEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( maxInlineUniformBlockSize == rhs.maxInlineUniformBlockSize ) + && ( maxPerStageDescriptorInlineUniformBlocks == rhs.maxPerStageDescriptorInlineUniformBlocks ) + && ( maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks == rhs.maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks ) + && ( maxDescriptorSetInlineUniformBlocks == rhs.maxDescriptorSetInlineUniformBlocks ) + && ( maxDescriptorSetUpdateAfterBindInlineUniformBlocks == rhs.maxDescriptorSetUpdateAfterBindInlineUniformBlocks ); + } + + bool operator!=( PhysicalDeviceInlineUniformBlockPropertiesEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePhysicalDeviceInlineUniformBlockPropertiesEXT; + + public: + void* pNext = nullptr; + uint32_t maxInlineUniformBlockSize; + uint32_t maxPerStageDescriptorInlineUniformBlocks; + uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks; + uint32_t maxDescriptorSetInlineUniformBlocks; + uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks; + }; + static_assert( sizeof( PhysicalDeviceInlineUniformBlockPropertiesEXT ) == sizeof( VkPhysicalDeviceInlineUniformBlockPropertiesEXT ), "struct and wrapper have different size!" ); + + struct WriteDescriptorSetInlineUniformBlockEXT + { + WriteDescriptorSetInlineUniformBlockEXT( uint32_t dataSize_ = 0, + const void* pData_ = nullptr ) + : dataSize( dataSize_ ) + , pData( pData_ ) + { + } + + WriteDescriptorSetInlineUniformBlockEXT( VkWriteDescriptorSetInlineUniformBlockEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( WriteDescriptorSetInlineUniformBlockEXT ) ); + } + + WriteDescriptorSetInlineUniformBlockEXT& operator=( VkWriteDescriptorSetInlineUniformBlockEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( WriteDescriptorSetInlineUniformBlockEXT ) ); + return *this; + } + WriteDescriptorSetInlineUniformBlockEXT& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + WriteDescriptorSetInlineUniformBlockEXT& setDataSize( uint32_t dataSize_ ) + { + dataSize = dataSize_; + return *this; + } + + WriteDescriptorSetInlineUniformBlockEXT& setPData( const void* pData_ ) + { + pData = pData_; + return *this; + } + + operator VkWriteDescriptorSetInlineUniformBlockEXT const&() const + { + return *reinterpret_cast(this); + } + + operator VkWriteDescriptorSetInlineUniformBlockEXT &() + { + return *reinterpret_cast(this); + } + + bool operator==( WriteDescriptorSetInlineUniformBlockEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( dataSize == rhs.dataSize ) + && ( pData == rhs.pData ); + } + + bool operator!=( WriteDescriptorSetInlineUniformBlockEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eWriteDescriptorSetInlineUniformBlockEXT; + + public: + const void* pNext = nullptr; + uint32_t dataSize; + const void* pData; + }; + static_assert( sizeof( WriteDescriptorSetInlineUniformBlockEXT ) == sizeof( VkWriteDescriptorSetInlineUniformBlockEXT ), "struct and wrapper have different size!" 
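+
+  // A minimal usage sketch (illustrative only, not generated code) for
+  // WriteDescriptorSetInlineUniformBlockEXT, defined above: inline data is supplied through the
+  // pNext chain of a WriteDescriptorSet whose descriptorCount is the byte size. 'device',
+  // 'descriptorSet' and 'blockData' are assumed to exist in the calling code.
+  //
+  //   vk::WriteDescriptorSetInlineUniformBlockEXT inlineData( sizeof( blockData ), &blockData );
+  //   vk::WriteDescriptorSet write;
+  //   write.setDstSet( descriptorSet )
+  //        .setDescriptorType( vk::DescriptorType::eInlineUniformBlockEXT )
+  //        .setDescriptorCount( sizeof( blockData ) )
+  //        .setPNext( &inlineData );
+  //   device.updateDescriptorSets( write, nullptr );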
); + + struct DescriptorPoolInlineUniformBlockCreateInfoEXT + { + DescriptorPoolInlineUniformBlockCreateInfoEXT( uint32_t maxInlineUniformBlockBindings_ = 0 ) + : maxInlineUniformBlockBindings( maxInlineUniformBlockBindings_ ) + { + } + + DescriptorPoolInlineUniformBlockCreateInfoEXT( VkDescriptorPoolInlineUniformBlockCreateInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( DescriptorPoolInlineUniformBlockCreateInfoEXT ) ); + } + + DescriptorPoolInlineUniformBlockCreateInfoEXT& operator=( VkDescriptorPoolInlineUniformBlockCreateInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( DescriptorPoolInlineUniformBlockCreateInfoEXT ) ); + return *this; + } + DescriptorPoolInlineUniformBlockCreateInfoEXT& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + DescriptorPoolInlineUniformBlockCreateInfoEXT& setMaxInlineUniformBlockBindings( uint32_t maxInlineUniformBlockBindings_ ) + { + maxInlineUniformBlockBindings = maxInlineUniformBlockBindings_; + return *this; + } + + operator VkDescriptorPoolInlineUniformBlockCreateInfoEXT const&() const + { + return *reinterpret_cast(this); + } + + operator VkDescriptorPoolInlineUniformBlockCreateInfoEXT &() + { + return *reinterpret_cast(this); + } + + bool operator==( DescriptorPoolInlineUniformBlockCreateInfoEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( maxInlineUniformBlockBindings == rhs.maxInlineUniformBlockBindings ); + } + + bool operator!=( DescriptorPoolInlineUniformBlockCreateInfoEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eDescriptorPoolInlineUniformBlockCreateInfoEXT; + + public: + const void* pNext = nullptr; + uint32_t maxInlineUniformBlockBindings; + }; + static_assert( sizeof( DescriptorPoolInlineUniformBlockCreateInfoEXT ) == sizeof( VkDescriptorPoolInlineUniformBlockCreateInfoEXT ), "struct and wrapper have different size!" 
); + + struct ImageFormatListCreateInfoKHR + { + ImageFormatListCreateInfoKHR( uint32_t viewFormatCount_ = 0, + const Format* pViewFormats_ = nullptr ) + : viewFormatCount( viewFormatCount_ ) + , pViewFormats( pViewFormats_ ) + { + } + + ImageFormatListCreateInfoKHR( VkImageFormatListCreateInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( ImageFormatListCreateInfoKHR ) ); + } + + ImageFormatListCreateInfoKHR& operator=( VkImageFormatListCreateInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( ImageFormatListCreateInfoKHR ) ); + return *this; + } + ImageFormatListCreateInfoKHR& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + ImageFormatListCreateInfoKHR& setViewFormatCount( uint32_t viewFormatCount_ ) + { + viewFormatCount = viewFormatCount_; + return *this; + } + + ImageFormatListCreateInfoKHR& setPViewFormats( const Format* pViewFormats_ ) + { + pViewFormats = pViewFormats_; + return *this; + } + + operator VkImageFormatListCreateInfoKHR const&() const + { + return *reinterpret_cast(this); + } + + operator VkImageFormatListCreateInfoKHR &() + { + return *reinterpret_cast(this); + } + + bool operator==( ImageFormatListCreateInfoKHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( viewFormatCount == rhs.viewFormatCount ) + && ( pViewFormats == rhs.pViewFormats ); + } + + bool operator!=( ImageFormatListCreateInfoKHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eImageFormatListCreateInfoKHR; + + public: + const void* pNext = nullptr; + uint32_t viewFormatCount; + const Format* pViewFormats; + }; + static_assert( sizeof( ImageFormatListCreateInfoKHR ) == sizeof( VkImageFormatListCreateInfoKHR ), "struct and wrapper have different size!" 
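+
+  // A minimal usage sketch (illustrative only, not generated code) for ImageFormatListCreateInfoKHR,
+  // defined above: it restricts the view formats of a mutable-format image via ImageCreateInfo::pNext.
+  // The rest of 'imageInfo' is assumed to be filled in by the caller.
+  //
+  //   vk::Format viewFormats[] = { vk::Format::eR8G8B8A8Unorm, vk::Format::eR8G8B8A8Srgb };
+  //   vk::ImageFormatListCreateInfoKHR formatList( 2, viewFormats );
+  //   vk::ImageCreateInfo imageInfo;
+  //   imageInfo.setFlags( vk::ImageCreateFlagBits::eMutableFormat ).setPNext( &formatList );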
); + + struct ValidationCacheCreateInfoEXT + { + ValidationCacheCreateInfoEXT( ValidationCacheCreateFlagsEXT flags_ = ValidationCacheCreateFlagsEXT(), + size_t initialDataSize_ = 0, + const void* pInitialData_ = nullptr ) + : flags( flags_ ) + , initialDataSize( initialDataSize_ ) + , pInitialData( pInitialData_ ) + { + } + + ValidationCacheCreateInfoEXT( VkValidationCacheCreateInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( ValidationCacheCreateInfoEXT ) ); + } + + ValidationCacheCreateInfoEXT& operator=( VkValidationCacheCreateInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( ValidationCacheCreateInfoEXT ) ); + return *this; + } + ValidationCacheCreateInfoEXT& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + ValidationCacheCreateInfoEXT& setFlags( ValidationCacheCreateFlagsEXT flags_ ) + { + flags = flags_; + return *this; + } + + ValidationCacheCreateInfoEXT& setInitialDataSize( size_t initialDataSize_ ) + { + initialDataSize = initialDataSize_; + return *this; + } + + ValidationCacheCreateInfoEXT& setPInitialData( const void* pInitialData_ ) + { + pInitialData = pInitialData_; + return *this; + } + + operator VkValidationCacheCreateInfoEXT const&() const + { + return *reinterpret_cast(this); + } + + operator VkValidationCacheCreateInfoEXT &() + { + return *reinterpret_cast(this); + } + + bool operator==( ValidationCacheCreateInfoEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( initialDataSize == rhs.initialDataSize ) + && ( pInitialData == rhs.pInitialData ); + } + + bool operator!=( ValidationCacheCreateInfoEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eValidationCacheCreateInfoEXT; + + public: + const void* pNext = nullptr; + ValidationCacheCreateFlagsEXT flags; + size_t initialDataSize; + const void* pInitialData; + }; + static_assert( sizeof( ValidationCacheCreateInfoEXT ) == sizeof( VkValidationCacheCreateInfoEXT ), "struct and wrapper have different size!" 
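+
+  // A minimal usage sketch (illustrative only, not generated code) for ValidationCacheCreateInfoEXT,
+  // defined above: creating a validation cache, optionally seeded with previously saved data. It
+  // assumes VK_EXT_validation_cache is enabled and 'savedBlob' (e.g. a std::vector<uint8_t>) exists.
+  //
+  //   vk::ValidationCacheCreateInfoEXT cacheInfo;
+  //   cacheInfo.setInitialDataSize( savedBlob.size() ).setPInitialData( savedBlob.data() );
+  //   vk::ValidationCacheEXT cache = device.createValidationCacheEXT( cacheInfo );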
); + + struct ShaderModuleValidationCacheCreateInfoEXT + { + ShaderModuleValidationCacheCreateInfoEXT( ValidationCacheEXT validationCache_ = ValidationCacheEXT() ) + : validationCache( validationCache_ ) + { + } + + ShaderModuleValidationCacheCreateInfoEXT( VkShaderModuleValidationCacheCreateInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( ShaderModuleValidationCacheCreateInfoEXT ) ); + } + + ShaderModuleValidationCacheCreateInfoEXT& operator=( VkShaderModuleValidationCacheCreateInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( ShaderModuleValidationCacheCreateInfoEXT ) ); + return *this; + } + ShaderModuleValidationCacheCreateInfoEXT& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + ShaderModuleValidationCacheCreateInfoEXT& setValidationCache( ValidationCacheEXT validationCache_ ) + { + validationCache = validationCache_; + return *this; + } + + operator VkShaderModuleValidationCacheCreateInfoEXT const&() const + { + return *reinterpret_cast(this); + } + + operator VkShaderModuleValidationCacheCreateInfoEXT &() + { + return *reinterpret_cast(this); + } + + bool operator==( ShaderModuleValidationCacheCreateInfoEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( validationCache == rhs.validationCache ); + } + + bool operator!=( ShaderModuleValidationCacheCreateInfoEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eShaderModuleValidationCacheCreateInfoEXT; + + public: + const void* pNext = nullptr; + ValidationCacheEXT validationCache; + }; + static_assert( sizeof( ShaderModuleValidationCacheCreateInfoEXT ) == sizeof( VkShaderModuleValidationCacheCreateInfoEXT ), "struct and wrapper have different size!" ); + + struct PhysicalDeviceMaintenance3Properties + { + operator VkPhysicalDeviceMaintenance3Properties const&() const + { + return *reinterpret_cast(this); + } + + operator VkPhysicalDeviceMaintenance3Properties &() + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDeviceMaintenance3Properties const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( maxPerSetDescriptors == rhs.maxPerSetDescriptors ) + && ( maxMemoryAllocationSize == rhs.maxMemoryAllocationSize ); + } + + bool operator!=( PhysicalDeviceMaintenance3Properties const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePhysicalDeviceMaintenance3Properties; + + public: + void* pNext = nullptr; + uint32_t maxPerSetDescriptors; + DeviceSize maxMemoryAllocationSize; + }; + static_assert( sizeof( PhysicalDeviceMaintenance3Properties ) == sizeof( VkPhysicalDeviceMaintenance3Properties ), "struct and wrapper have different size!" 
); + + using PhysicalDeviceMaintenance3PropertiesKHR = PhysicalDeviceMaintenance3Properties; + + struct DescriptorSetLayoutSupport + { + operator VkDescriptorSetLayoutSupport const&() const + { + return *reinterpret_cast(this); + } + + operator VkDescriptorSetLayoutSupport &() + { + return *reinterpret_cast(this); + } + + bool operator==( DescriptorSetLayoutSupport const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( supported == rhs.supported ); + } + + bool operator!=( DescriptorSetLayoutSupport const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eDescriptorSetLayoutSupport; + + public: + void* pNext = nullptr; + Bool32 supported; + }; + static_assert( sizeof( DescriptorSetLayoutSupport ) == sizeof( VkDescriptorSetLayoutSupport ), "struct and wrapper have different size!" ); + + using DescriptorSetLayoutSupportKHR = DescriptorSetLayoutSupport; + + struct PhysicalDeviceShaderDrawParameterFeatures + { + PhysicalDeviceShaderDrawParameterFeatures( Bool32 shaderDrawParameters_ = 0 ) + : shaderDrawParameters( shaderDrawParameters_ ) + { + } + + PhysicalDeviceShaderDrawParameterFeatures( VkPhysicalDeviceShaderDrawParameterFeatures const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceShaderDrawParameterFeatures ) ); + } + + PhysicalDeviceShaderDrawParameterFeatures& operator=( VkPhysicalDeviceShaderDrawParameterFeatures const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceShaderDrawParameterFeatures ) ); + return *this; + } + PhysicalDeviceShaderDrawParameterFeatures& setPNext( void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceShaderDrawParameterFeatures& setShaderDrawParameters( Bool32 shaderDrawParameters_ ) + { + shaderDrawParameters = shaderDrawParameters_; + return *this; + } + + operator VkPhysicalDeviceShaderDrawParameterFeatures const&() const + { + return *reinterpret_cast(this); + } + + operator VkPhysicalDeviceShaderDrawParameterFeatures &() + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDeviceShaderDrawParameterFeatures const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( shaderDrawParameters == rhs.shaderDrawParameters ); + } + + bool operator!=( PhysicalDeviceShaderDrawParameterFeatures const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePhysicalDeviceShaderDrawParameterFeatures; + + public: + void* pNext = nullptr; + Bool32 shaderDrawParameters; + }; + static_assert( sizeof( PhysicalDeviceShaderDrawParameterFeatures ) == sizeof( VkPhysicalDeviceShaderDrawParameterFeatures ), "struct and wrapper have different size!" 
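+
+  // A minimal usage sketch (illustrative only, not generated code) for
+  // PhysicalDeviceShaderDrawParameterFeatures, defined above: like the other feature structs, it can
+  // be chained into DeviceCreateInfo::pNext to request the feature at device creation. 'deviceInfo'
+  // is assumed to be otherwise complete and 'physicalDevice' to exist.
+  //
+  //   vk::PhysicalDeviceShaderDrawParameterFeatures drawParams( VK_TRUE );
+  //   vk::DeviceCreateInfo deviceInfo;
+  //   deviceInfo.setPNext( &drawParams );
+  //   vk::Device device = physicalDevice.createDevice( deviceInfo );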
);
+
+  struct DebugUtilsLabelEXT
+  {
+    DebugUtilsLabelEXT( const char* pLabelName_ = nullptr,
+                        std::array<float,4> const& color_ = { { 0, 0, 0, 0 } } )
+      : pLabelName( pLabelName_ )
+    {
+      memcpy( &color, color_.data(), 4 * sizeof( float ) );
+    }
+
+    DebugUtilsLabelEXT( VkDebugUtilsLabelEXT const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( DebugUtilsLabelEXT ) );
+    }
+
+    DebugUtilsLabelEXT& operator=( VkDebugUtilsLabelEXT const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( DebugUtilsLabelEXT ) );
+      return *this;
+    }
+    DebugUtilsLabelEXT& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DebugUtilsLabelEXT& setPLabelName( const char* pLabelName_ )
+    {
+      pLabelName = pLabelName_;
+      return *this;
+    }
+
+    DebugUtilsLabelEXT& setColor( std::array<float,4> color_ )
+    {
+      memcpy( &color, color_.data(), 4 * sizeof( float ) );
+      return *this;
+    }
+
+    operator VkDebugUtilsLabelEXT const&() const
+    {
+      return *reinterpret_cast<const VkDebugUtilsLabelEXT*>(this);
+    }
+
+    operator VkDebugUtilsLabelEXT &()
+    {
+      return *reinterpret_cast<VkDebugUtilsLabelEXT*>(this);
+    }
+
+    bool operator==( DebugUtilsLabelEXT const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( pLabelName == rhs.pLabelName )
+          && ( memcmp( color, rhs.color, 4 * sizeof( float ) ) == 0 );
+    }
+
+    bool operator!=( DebugUtilsLabelEXT const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType = StructureType::eDebugUtilsLabelEXT;
+
+  public:
+    const void* pNext = nullptr;
+    const char* pLabelName;
+    float color[4];
+  };
+  static_assert( sizeof( DebugUtilsLabelEXT ) == sizeof( VkDebugUtilsLabelEXT ), "struct and wrapper have different size!" );
+
+  struct MemoryHostPointerPropertiesEXT
+  {
+    MemoryHostPointerPropertiesEXT( uint32_t memoryTypeBits_ = 0 )
+      : memoryTypeBits( memoryTypeBits_ )
+    {
+    }
+
+    MemoryHostPointerPropertiesEXT( VkMemoryHostPointerPropertiesEXT const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( MemoryHostPointerPropertiesEXT ) );
+    }
+
+    MemoryHostPointerPropertiesEXT& operator=( VkMemoryHostPointerPropertiesEXT const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( MemoryHostPointerPropertiesEXT ) );
+      return *this;
+    }
+    MemoryHostPointerPropertiesEXT& setPNext( void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    MemoryHostPointerPropertiesEXT& setMemoryTypeBits( uint32_t memoryTypeBits_ )
+    {
+      memoryTypeBits = memoryTypeBits_;
+      return *this;
+    }
+
+    operator VkMemoryHostPointerPropertiesEXT const&() const
+    {
+      return *reinterpret_cast<const VkMemoryHostPointerPropertiesEXT*>(this);
+    }
+
+    operator VkMemoryHostPointerPropertiesEXT &()
+    {
+      return *reinterpret_cast<VkMemoryHostPointerPropertiesEXT*>(this);
+    }
+
+    bool operator==( MemoryHostPointerPropertiesEXT const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( memoryTypeBits == rhs.memoryTypeBits );
+    }
+
+    bool operator!=( MemoryHostPointerPropertiesEXT const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType = StructureType::eMemoryHostPointerPropertiesEXT;
+
+  public:
+    void* pNext = nullptr;
+    uint32_t memoryTypeBits;
+  };
+  static_assert( sizeof( MemoryHostPointerPropertiesEXT ) == sizeof( VkMemoryHostPointerPropertiesEXT ), "struct and wrapper have different size!"
); + + struct PhysicalDeviceExternalMemoryHostPropertiesEXT + { + PhysicalDeviceExternalMemoryHostPropertiesEXT( DeviceSize minImportedHostPointerAlignment_ = 0 ) + : minImportedHostPointerAlignment( minImportedHostPointerAlignment_ ) + { + } + + PhysicalDeviceExternalMemoryHostPropertiesEXT( VkPhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceExternalMemoryHostPropertiesEXT ) ); + } + + PhysicalDeviceExternalMemoryHostPropertiesEXT& operator=( VkPhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceExternalMemoryHostPropertiesEXT ) ); + return *this; + } + PhysicalDeviceExternalMemoryHostPropertiesEXT& setPNext( void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceExternalMemoryHostPropertiesEXT& setMinImportedHostPointerAlignment( DeviceSize minImportedHostPointerAlignment_ ) + { + minImportedHostPointerAlignment = minImportedHostPointerAlignment_; + return *this; + } + + operator VkPhysicalDeviceExternalMemoryHostPropertiesEXT const&() const + { + return *reinterpret_cast(this); + } + + operator VkPhysicalDeviceExternalMemoryHostPropertiesEXT &() + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDeviceExternalMemoryHostPropertiesEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( minImportedHostPointerAlignment == rhs.minImportedHostPointerAlignment ); + } + + bool operator!=( PhysicalDeviceExternalMemoryHostPropertiesEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePhysicalDeviceExternalMemoryHostPropertiesEXT; + + public: + void* pNext = nullptr; + DeviceSize minImportedHostPointerAlignment; + }; + static_assert( sizeof( PhysicalDeviceExternalMemoryHostPropertiesEXT ) == sizeof( VkPhysicalDeviceExternalMemoryHostPropertiesEXT ), "struct and wrapper have different size!" 
); + + struct PhysicalDeviceConservativeRasterizationPropertiesEXT + { + PhysicalDeviceConservativeRasterizationPropertiesEXT( float primitiveOverestimationSize_ = 0, + float maxExtraPrimitiveOverestimationSize_ = 0, + float extraPrimitiveOverestimationSizeGranularity_ = 0, + Bool32 primitiveUnderestimation_ = 0, + Bool32 conservativePointAndLineRasterization_ = 0, + Bool32 degenerateTrianglesRasterized_ = 0, + Bool32 degenerateLinesRasterized_ = 0, + Bool32 fullyCoveredFragmentShaderInputVariable_ = 0, + Bool32 conservativeRasterizationPostDepthCoverage_ = 0 ) + : primitiveOverestimationSize( primitiveOverestimationSize_ ) + , maxExtraPrimitiveOverestimationSize( maxExtraPrimitiveOverestimationSize_ ) + , extraPrimitiveOverestimationSizeGranularity( extraPrimitiveOverestimationSizeGranularity_ ) + , primitiveUnderestimation( primitiveUnderestimation_ ) + , conservativePointAndLineRasterization( conservativePointAndLineRasterization_ ) + , degenerateTrianglesRasterized( degenerateTrianglesRasterized_ ) + , degenerateLinesRasterized( degenerateLinesRasterized_ ) + , fullyCoveredFragmentShaderInputVariable( fullyCoveredFragmentShaderInputVariable_ ) + , conservativeRasterizationPostDepthCoverage( conservativeRasterizationPostDepthCoverage_ ) + { + } + + PhysicalDeviceConservativeRasterizationPropertiesEXT( VkPhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceConservativeRasterizationPropertiesEXT ) ); + } + + PhysicalDeviceConservativeRasterizationPropertiesEXT& operator=( VkPhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceConservativeRasterizationPropertiesEXT ) ); + return *this; + } + PhysicalDeviceConservativeRasterizationPropertiesEXT& setPNext( void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceConservativeRasterizationPropertiesEXT& setPrimitiveOverestimationSize( float primitiveOverestimationSize_ ) + { + primitiveOverestimationSize = primitiveOverestimationSize_; + return *this; + } + + PhysicalDeviceConservativeRasterizationPropertiesEXT& setMaxExtraPrimitiveOverestimationSize( float maxExtraPrimitiveOverestimationSize_ ) + { + maxExtraPrimitiveOverestimationSize = maxExtraPrimitiveOverestimationSize_; + return *this; + } + + PhysicalDeviceConservativeRasterizationPropertiesEXT& setExtraPrimitiveOverestimationSizeGranularity( float extraPrimitiveOverestimationSizeGranularity_ ) + { + extraPrimitiveOverestimationSizeGranularity = extraPrimitiveOverestimationSizeGranularity_; + return *this; + } + + PhysicalDeviceConservativeRasterizationPropertiesEXT& setPrimitiveUnderestimation( Bool32 primitiveUnderestimation_ ) + { + primitiveUnderestimation = primitiveUnderestimation_; + return *this; + } + + PhysicalDeviceConservativeRasterizationPropertiesEXT& setConservativePointAndLineRasterization( Bool32 conservativePointAndLineRasterization_ ) + { + conservativePointAndLineRasterization = conservativePointAndLineRasterization_; + return *this; + } + + PhysicalDeviceConservativeRasterizationPropertiesEXT& setDegenerateTrianglesRasterized( Bool32 degenerateTrianglesRasterized_ ) + { + degenerateTrianglesRasterized = degenerateTrianglesRasterized_; + return *this; + } + + PhysicalDeviceConservativeRasterizationPropertiesEXT& setDegenerateLinesRasterized( Bool32 degenerateLinesRasterized_ ) + { + degenerateLinesRasterized = degenerateLinesRasterized_; + return *this; + } + + PhysicalDeviceConservativeRasterizationPropertiesEXT& 
setFullyCoveredFragmentShaderInputVariable( Bool32 fullyCoveredFragmentShaderInputVariable_ ) + { + fullyCoveredFragmentShaderInputVariable = fullyCoveredFragmentShaderInputVariable_; + return *this; + } + + PhysicalDeviceConservativeRasterizationPropertiesEXT& setConservativeRasterizationPostDepthCoverage( Bool32 conservativeRasterizationPostDepthCoverage_ ) + { + conservativeRasterizationPostDepthCoverage = conservativeRasterizationPostDepthCoverage_; + return *this; + } + + operator VkPhysicalDeviceConservativeRasterizationPropertiesEXT const&() const + { + return *reinterpret_cast(this); + } + + operator VkPhysicalDeviceConservativeRasterizationPropertiesEXT &() + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDeviceConservativeRasterizationPropertiesEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( primitiveOverestimationSize == rhs.primitiveOverestimationSize ) + && ( maxExtraPrimitiveOverestimationSize == rhs.maxExtraPrimitiveOverestimationSize ) + && ( extraPrimitiveOverestimationSizeGranularity == rhs.extraPrimitiveOverestimationSizeGranularity ) + && ( primitiveUnderestimation == rhs.primitiveUnderestimation ) + && ( conservativePointAndLineRasterization == rhs.conservativePointAndLineRasterization ) + && ( degenerateTrianglesRasterized == rhs.degenerateTrianglesRasterized ) + && ( degenerateLinesRasterized == rhs.degenerateLinesRasterized ) + && ( fullyCoveredFragmentShaderInputVariable == rhs.fullyCoveredFragmentShaderInputVariable ) + && ( conservativeRasterizationPostDepthCoverage == rhs.conservativeRasterizationPostDepthCoverage ); + } + + bool operator!=( PhysicalDeviceConservativeRasterizationPropertiesEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePhysicalDeviceConservativeRasterizationPropertiesEXT; + + public: + void* pNext = nullptr; + float primitiveOverestimationSize; + float maxExtraPrimitiveOverestimationSize; + float extraPrimitiveOverestimationSizeGranularity; + Bool32 primitiveUnderestimation; + Bool32 conservativePointAndLineRasterization; + Bool32 degenerateTrianglesRasterized; + Bool32 degenerateLinesRasterized; + Bool32 fullyCoveredFragmentShaderInputVariable; + Bool32 conservativeRasterizationPostDepthCoverage; + }; + static_assert( sizeof( PhysicalDeviceConservativeRasterizationPropertiesEXT ) == sizeof( VkPhysicalDeviceConservativeRasterizationPropertiesEXT ), "struct and wrapper have different size!" 
); + + struct PhysicalDeviceShaderCorePropertiesAMD + { + operator VkPhysicalDeviceShaderCorePropertiesAMD const&() const + { + return *reinterpret_cast(this); + } + + operator VkPhysicalDeviceShaderCorePropertiesAMD &() + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDeviceShaderCorePropertiesAMD const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( shaderEngineCount == rhs.shaderEngineCount ) + && ( shaderArraysPerEngineCount == rhs.shaderArraysPerEngineCount ) + && ( computeUnitsPerShaderArray == rhs.computeUnitsPerShaderArray ) + && ( simdPerComputeUnit == rhs.simdPerComputeUnit ) + && ( wavefrontsPerSimd == rhs.wavefrontsPerSimd ) + && ( wavefrontSize == rhs.wavefrontSize ) + && ( sgprsPerSimd == rhs.sgprsPerSimd ) + && ( minSgprAllocation == rhs.minSgprAllocation ) + && ( maxSgprAllocation == rhs.maxSgprAllocation ) + && ( sgprAllocationGranularity == rhs.sgprAllocationGranularity ) + && ( vgprsPerSimd == rhs.vgprsPerSimd ) + && ( minVgprAllocation == rhs.minVgprAllocation ) + && ( maxVgprAllocation == rhs.maxVgprAllocation ) + && ( vgprAllocationGranularity == rhs.vgprAllocationGranularity ); + } + + bool operator!=( PhysicalDeviceShaderCorePropertiesAMD const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePhysicalDeviceShaderCorePropertiesAMD; + + public: + void* pNext = nullptr; + uint32_t shaderEngineCount; + uint32_t shaderArraysPerEngineCount; + uint32_t computeUnitsPerShaderArray; + uint32_t simdPerComputeUnit; + uint32_t wavefrontsPerSimd; + uint32_t wavefrontSize; + uint32_t sgprsPerSimd; + uint32_t minSgprAllocation; + uint32_t maxSgprAllocation; + uint32_t sgprAllocationGranularity; + uint32_t vgprsPerSimd; + uint32_t minVgprAllocation; + uint32_t maxVgprAllocation; + uint32_t vgprAllocationGranularity; + }; + static_assert( sizeof( PhysicalDeviceShaderCorePropertiesAMD ) == sizeof( VkPhysicalDeviceShaderCorePropertiesAMD ), "struct and wrapper have different size!" 
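+
+  // A minimal query sketch (illustrative only, not generated code) for
+  // PhysicalDeviceShaderCorePropertiesAMD, defined above: pure property structs have no setters and
+  // are filled by the implementation when chained into PhysicalDeviceProperties2. 'physicalDevice'
+  // is assumed to exist.
+  //
+  //   vk::PhysicalDeviceShaderCorePropertiesAMD coreProps;
+  //   vk::PhysicalDeviceProperties2 props2;
+  //   props2.pNext = &coreProps;
+  //   physicalDevice.getProperties2( &props2 );
+  //   uint32_t waveSize = coreProps.wavefrontSize;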
); + + struct PhysicalDeviceDescriptorIndexingFeaturesEXT + { + PhysicalDeviceDescriptorIndexingFeaturesEXT( Bool32 shaderInputAttachmentArrayDynamicIndexing_ = 0, + Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ = 0, + Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ = 0, + Bool32 shaderUniformBufferArrayNonUniformIndexing_ = 0, + Bool32 shaderSampledImageArrayNonUniformIndexing_ = 0, + Bool32 shaderStorageBufferArrayNonUniformIndexing_ = 0, + Bool32 shaderStorageImageArrayNonUniformIndexing_ = 0, + Bool32 shaderInputAttachmentArrayNonUniformIndexing_ = 0, + Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ = 0, + Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ = 0, + Bool32 descriptorBindingUniformBufferUpdateAfterBind_ = 0, + Bool32 descriptorBindingSampledImageUpdateAfterBind_ = 0, + Bool32 descriptorBindingStorageImageUpdateAfterBind_ = 0, + Bool32 descriptorBindingStorageBufferUpdateAfterBind_ = 0, + Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ = 0, + Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ = 0, + Bool32 descriptorBindingUpdateUnusedWhilePending_ = 0, + Bool32 descriptorBindingPartiallyBound_ = 0, + Bool32 descriptorBindingVariableDescriptorCount_ = 0, + Bool32 runtimeDescriptorArray_ = 0 ) + : shaderInputAttachmentArrayDynamicIndexing( shaderInputAttachmentArrayDynamicIndexing_ ) + , shaderUniformTexelBufferArrayDynamicIndexing( shaderUniformTexelBufferArrayDynamicIndexing_ ) + , shaderStorageTexelBufferArrayDynamicIndexing( shaderStorageTexelBufferArrayDynamicIndexing_ ) + , shaderUniformBufferArrayNonUniformIndexing( shaderUniformBufferArrayNonUniformIndexing_ ) + , shaderSampledImageArrayNonUniformIndexing( shaderSampledImageArrayNonUniformIndexing_ ) + , shaderStorageBufferArrayNonUniformIndexing( shaderStorageBufferArrayNonUniformIndexing_ ) + , shaderStorageImageArrayNonUniformIndexing( shaderStorageImageArrayNonUniformIndexing_ ) + , shaderInputAttachmentArrayNonUniformIndexing( shaderInputAttachmentArrayNonUniformIndexing_ ) + , shaderUniformTexelBufferArrayNonUniformIndexing( shaderUniformTexelBufferArrayNonUniformIndexing_ ) + , shaderStorageTexelBufferArrayNonUniformIndexing( shaderStorageTexelBufferArrayNonUniformIndexing_ ) + , descriptorBindingUniformBufferUpdateAfterBind( descriptorBindingUniformBufferUpdateAfterBind_ ) + , descriptorBindingSampledImageUpdateAfterBind( descriptorBindingSampledImageUpdateAfterBind_ ) + , descriptorBindingStorageImageUpdateAfterBind( descriptorBindingStorageImageUpdateAfterBind_ ) + , descriptorBindingStorageBufferUpdateAfterBind( descriptorBindingStorageBufferUpdateAfterBind_ ) + , descriptorBindingUniformTexelBufferUpdateAfterBind( descriptorBindingUniformTexelBufferUpdateAfterBind_ ) + , descriptorBindingStorageTexelBufferUpdateAfterBind( descriptorBindingStorageTexelBufferUpdateAfterBind_ ) + , descriptorBindingUpdateUnusedWhilePending( descriptorBindingUpdateUnusedWhilePending_ ) + , descriptorBindingPartiallyBound( descriptorBindingPartiallyBound_ ) + , descriptorBindingVariableDescriptorCount( descriptorBindingVariableDescriptorCount_ ) + , runtimeDescriptorArray( runtimeDescriptorArray_ ) + { + } + + PhysicalDeviceDescriptorIndexingFeaturesEXT( VkPhysicalDeviceDescriptorIndexingFeaturesEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceDescriptorIndexingFeaturesEXT ) ); + } + + PhysicalDeviceDescriptorIndexingFeaturesEXT& operator=( VkPhysicalDeviceDescriptorIndexingFeaturesEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( 
PhysicalDeviceDescriptorIndexingFeaturesEXT ) ); + return *this; + } + PhysicalDeviceDescriptorIndexingFeaturesEXT& setPNext( void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceDescriptorIndexingFeaturesEXT& setShaderInputAttachmentArrayDynamicIndexing( Bool32 shaderInputAttachmentArrayDynamicIndexing_ ) + { + shaderInputAttachmentArrayDynamicIndexing = shaderInputAttachmentArrayDynamicIndexing_; + return *this; + } + + PhysicalDeviceDescriptorIndexingFeaturesEXT& setShaderUniformTexelBufferArrayDynamicIndexing( Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ ) + { + shaderUniformTexelBufferArrayDynamicIndexing = shaderUniformTexelBufferArrayDynamicIndexing_; + return *this; + } + + PhysicalDeviceDescriptorIndexingFeaturesEXT& setShaderStorageTexelBufferArrayDynamicIndexing( Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ ) + { + shaderStorageTexelBufferArrayDynamicIndexing = shaderStorageTexelBufferArrayDynamicIndexing_; + return *this; + } + + PhysicalDeviceDescriptorIndexingFeaturesEXT& setShaderUniformBufferArrayNonUniformIndexing( Bool32 shaderUniformBufferArrayNonUniformIndexing_ ) + { + shaderUniformBufferArrayNonUniformIndexing = shaderUniformBufferArrayNonUniformIndexing_; + return *this; + } + + PhysicalDeviceDescriptorIndexingFeaturesEXT& setShaderSampledImageArrayNonUniformIndexing( Bool32 shaderSampledImageArrayNonUniformIndexing_ ) + { + shaderSampledImageArrayNonUniformIndexing = shaderSampledImageArrayNonUniformIndexing_; + return *this; + } + + PhysicalDeviceDescriptorIndexingFeaturesEXT& setShaderStorageBufferArrayNonUniformIndexing( Bool32 shaderStorageBufferArrayNonUniformIndexing_ ) + { + shaderStorageBufferArrayNonUniformIndexing = shaderStorageBufferArrayNonUniformIndexing_; + return *this; + } + + PhysicalDeviceDescriptorIndexingFeaturesEXT& setShaderStorageImageArrayNonUniformIndexing( Bool32 shaderStorageImageArrayNonUniformIndexing_ ) + { + shaderStorageImageArrayNonUniformIndexing = shaderStorageImageArrayNonUniformIndexing_; + return *this; + } + + PhysicalDeviceDescriptorIndexingFeaturesEXT& setShaderInputAttachmentArrayNonUniformIndexing( Bool32 shaderInputAttachmentArrayNonUniformIndexing_ ) + { + shaderInputAttachmentArrayNonUniformIndexing = shaderInputAttachmentArrayNonUniformIndexing_; + return *this; + } + + PhysicalDeviceDescriptorIndexingFeaturesEXT& setShaderUniformTexelBufferArrayNonUniformIndexing( Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ ) + { + shaderUniformTexelBufferArrayNonUniformIndexing = shaderUniformTexelBufferArrayNonUniformIndexing_; + return *this; + } + + PhysicalDeviceDescriptorIndexingFeaturesEXT& setShaderStorageTexelBufferArrayNonUniformIndexing( Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ ) + { + shaderStorageTexelBufferArrayNonUniformIndexing = shaderStorageTexelBufferArrayNonUniformIndexing_; + return *this; + } + + PhysicalDeviceDescriptorIndexingFeaturesEXT& setDescriptorBindingUniformBufferUpdateAfterBind( Bool32 descriptorBindingUniformBufferUpdateAfterBind_ ) + { + descriptorBindingUniformBufferUpdateAfterBind = descriptorBindingUniformBufferUpdateAfterBind_; + return *this; + } + + PhysicalDeviceDescriptorIndexingFeaturesEXT& setDescriptorBindingSampledImageUpdateAfterBind( Bool32 descriptorBindingSampledImageUpdateAfterBind_ ) + { + descriptorBindingSampledImageUpdateAfterBind = descriptorBindingSampledImageUpdateAfterBind_; + return *this; + } + + PhysicalDeviceDescriptorIndexingFeaturesEXT& setDescriptorBindingStorageImageUpdateAfterBind( Bool32 
descriptorBindingStorageImageUpdateAfterBind_ ) + { + descriptorBindingStorageImageUpdateAfterBind = descriptorBindingStorageImageUpdateAfterBind_; + return *this; + } + + PhysicalDeviceDescriptorIndexingFeaturesEXT& setDescriptorBindingStorageBufferUpdateAfterBind( Bool32 descriptorBindingStorageBufferUpdateAfterBind_ ) + { + descriptorBindingStorageBufferUpdateAfterBind = descriptorBindingStorageBufferUpdateAfterBind_; + return *this; + } + + PhysicalDeviceDescriptorIndexingFeaturesEXT& setDescriptorBindingUniformTexelBufferUpdateAfterBind( Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ ) + { + descriptorBindingUniformTexelBufferUpdateAfterBind = descriptorBindingUniformTexelBufferUpdateAfterBind_; + return *this; + } + + PhysicalDeviceDescriptorIndexingFeaturesEXT& setDescriptorBindingStorageTexelBufferUpdateAfterBind( Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ ) + { + descriptorBindingStorageTexelBufferUpdateAfterBind = descriptorBindingStorageTexelBufferUpdateAfterBind_; + return *this; + } + + PhysicalDeviceDescriptorIndexingFeaturesEXT& setDescriptorBindingUpdateUnusedWhilePending( Bool32 descriptorBindingUpdateUnusedWhilePending_ ) + { + descriptorBindingUpdateUnusedWhilePending = descriptorBindingUpdateUnusedWhilePending_; + return *this; + } + + PhysicalDeviceDescriptorIndexingFeaturesEXT& setDescriptorBindingPartiallyBound( Bool32 descriptorBindingPartiallyBound_ ) + { + descriptorBindingPartiallyBound = descriptorBindingPartiallyBound_; + return *this; + } + + PhysicalDeviceDescriptorIndexingFeaturesEXT& setDescriptorBindingVariableDescriptorCount( Bool32 descriptorBindingVariableDescriptorCount_ ) + { + descriptorBindingVariableDescriptorCount = descriptorBindingVariableDescriptorCount_; + return *this; + } + + PhysicalDeviceDescriptorIndexingFeaturesEXT& setRuntimeDescriptorArray( Bool32 runtimeDescriptorArray_ ) + { + runtimeDescriptorArray = runtimeDescriptorArray_; + return *this; + } + + operator VkPhysicalDeviceDescriptorIndexingFeaturesEXT const&() const + { + return *reinterpret_cast(this); + } + + operator VkPhysicalDeviceDescriptorIndexingFeaturesEXT &() + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDeviceDescriptorIndexingFeaturesEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( shaderInputAttachmentArrayDynamicIndexing == rhs.shaderInputAttachmentArrayDynamicIndexing ) + && ( shaderUniformTexelBufferArrayDynamicIndexing == rhs.shaderUniformTexelBufferArrayDynamicIndexing ) + && ( shaderStorageTexelBufferArrayDynamicIndexing == rhs.shaderStorageTexelBufferArrayDynamicIndexing ) + && ( shaderUniformBufferArrayNonUniformIndexing == rhs.shaderUniformBufferArrayNonUniformIndexing ) + && ( shaderSampledImageArrayNonUniformIndexing == rhs.shaderSampledImageArrayNonUniformIndexing ) + && ( shaderStorageBufferArrayNonUniformIndexing == rhs.shaderStorageBufferArrayNonUniformIndexing ) + && ( shaderStorageImageArrayNonUniformIndexing == rhs.shaderStorageImageArrayNonUniformIndexing ) + && ( shaderInputAttachmentArrayNonUniformIndexing == rhs.shaderInputAttachmentArrayNonUniformIndexing ) + && ( shaderUniformTexelBufferArrayNonUniformIndexing == rhs.shaderUniformTexelBufferArrayNonUniformIndexing ) + && ( shaderStorageTexelBufferArrayNonUniformIndexing == rhs.shaderStorageTexelBufferArrayNonUniformIndexing ) + && ( descriptorBindingUniformBufferUpdateAfterBind == rhs.descriptorBindingUniformBufferUpdateAfterBind ) + && ( descriptorBindingSampledImageUpdateAfterBind == 
rhs.descriptorBindingSampledImageUpdateAfterBind ) + && ( descriptorBindingStorageImageUpdateAfterBind == rhs.descriptorBindingStorageImageUpdateAfterBind ) + && ( descriptorBindingStorageBufferUpdateAfterBind == rhs.descriptorBindingStorageBufferUpdateAfterBind ) + && ( descriptorBindingUniformTexelBufferUpdateAfterBind == rhs.descriptorBindingUniformTexelBufferUpdateAfterBind ) + && ( descriptorBindingStorageTexelBufferUpdateAfterBind == rhs.descriptorBindingStorageTexelBufferUpdateAfterBind ) + && ( descriptorBindingUpdateUnusedWhilePending == rhs.descriptorBindingUpdateUnusedWhilePending ) + && ( descriptorBindingPartiallyBound == rhs.descriptorBindingPartiallyBound ) + && ( descriptorBindingVariableDescriptorCount == rhs.descriptorBindingVariableDescriptorCount ) + && ( runtimeDescriptorArray == rhs.runtimeDescriptorArray ); + } + + bool operator!=( PhysicalDeviceDescriptorIndexingFeaturesEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePhysicalDeviceDescriptorIndexingFeaturesEXT; + + public: + void* pNext = nullptr; + Bool32 shaderInputAttachmentArrayDynamicIndexing; + Bool32 shaderUniformTexelBufferArrayDynamicIndexing; + Bool32 shaderStorageTexelBufferArrayDynamicIndexing; + Bool32 shaderUniformBufferArrayNonUniformIndexing; + Bool32 shaderSampledImageArrayNonUniformIndexing; + Bool32 shaderStorageBufferArrayNonUniformIndexing; + Bool32 shaderStorageImageArrayNonUniformIndexing; + Bool32 shaderInputAttachmentArrayNonUniformIndexing; + Bool32 shaderUniformTexelBufferArrayNonUniformIndexing; + Bool32 shaderStorageTexelBufferArrayNonUniformIndexing; + Bool32 descriptorBindingUniformBufferUpdateAfterBind; + Bool32 descriptorBindingSampledImageUpdateAfterBind; + Bool32 descriptorBindingStorageImageUpdateAfterBind; + Bool32 descriptorBindingStorageBufferUpdateAfterBind; + Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind; + Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind; + Bool32 descriptorBindingUpdateUnusedWhilePending; + Bool32 descriptorBindingPartiallyBound; + Bool32 descriptorBindingVariableDescriptorCount; + Bool32 runtimeDescriptorArray; + }; + static_assert( sizeof( PhysicalDeviceDescriptorIndexingFeaturesEXT ) == sizeof( VkPhysicalDeviceDescriptorIndexingFeaturesEXT ), "struct and wrapper have different size!" 
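+
+  // A minimal usage sketch (illustrative only, not generated code) for
+  // PhysicalDeviceDescriptorIndexingFeaturesEXT, defined above: the feature bits are typically
+  // queried first and then the struct is re-chained into DeviceCreateInfo::pNext to enable them.
+  // Only the setters defined above are used here; 'deviceInfo' is assumed to be otherwise complete.
+  //
+  //   vk::PhysicalDeviceDescriptorIndexingFeaturesEXT indexingFeatures;
+  //   indexingFeatures.setRuntimeDescriptorArray( VK_TRUE )
+  //                   .setDescriptorBindingPartiallyBound( VK_TRUE );
+  //   vk::DeviceCreateInfo deviceInfo;
+  //   deviceInfo.setPNext( &indexingFeatures );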
); + + struct PhysicalDeviceDescriptorIndexingPropertiesEXT + { + operator VkPhysicalDeviceDescriptorIndexingPropertiesEXT const&() const + { + return *reinterpret_cast(this); + } + + operator VkPhysicalDeviceDescriptorIndexingPropertiesEXT &() + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDeviceDescriptorIndexingPropertiesEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( maxUpdateAfterBindDescriptorsInAllPools == rhs.maxUpdateAfterBindDescriptorsInAllPools ) + && ( shaderUniformBufferArrayNonUniformIndexingNative == rhs.shaderUniformBufferArrayNonUniformIndexingNative ) + && ( shaderSampledImageArrayNonUniformIndexingNative == rhs.shaderSampledImageArrayNonUniformIndexingNative ) + && ( shaderStorageBufferArrayNonUniformIndexingNative == rhs.shaderStorageBufferArrayNonUniformIndexingNative ) + && ( shaderStorageImageArrayNonUniformIndexingNative == rhs.shaderStorageImageArrayNonUniformIndexingNative ) + && ( shaderInputAttachmentArrayNonUniformIndexingNative == rhs.shaderInputAttachmentArrayNonUniformIndexingNative ) + && ( robustBufferAccessUpdateAfterBind == rhs.robustBufferAccessUpdateAfterBind ) + && ( quadDivergentImplicitLod == rhs.quadDivergentImplicitLod ) + && ( maxPerStageDescriptorUpdateAfterBindSamplers == rhs.maxPerStageDescriptorUpdateAfterBindSamplers ) + && ( maxPerStageDescriptorUpdateAfterBindUniformBuffers == rhs.maxPerStageDescriptorUpdateAfterBindUniformBuffers ) + && ( maxPerStageDescriptorUpdateAfterBindStorageBuffers == rhs.maxPerStageDescriptorUpdateAfterBindStorageBuffers ) + && ( maxPerStageDescriptorUpdateAfterBindSampledImages == rhs.maxPerStageDescriptorUpdateAfterBindSampledImages ) + && ( maxPerStageDescriptorUpdateAfterBindStorageImages == rhs.maxPerStageDescriptorUpdateAfterBindStorageImages ) + && ( maxPerStageDescriptorUpdateAfterBindInputAttachments == rhs.maxPerStageDescriptorUpdateAfterBindInputAttachments ) + && ( maxPerStageUpdateAfterBindResources == rhs.maxPerStageUpdateAfterBindResources ) + && ( maxDescriptorSetUpdateAfterBindSamplers == rhs.maxDescriptorSetUpdateAfterBindSamplers ) + && ( maxDescriptorSetUpdateAfterBindUniformBuffers == rhs.maxDescriptorSetUpdateAfterBindUniformBuffers ) + && ( maxDescriptorSetUpdateAfterBindUniformBuffersDynamic == rhs.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic ) + && ( maxDescriptorSetUpdateAfterBindStorageBuffers == rhs.maxDescriptorSetUpdateAfterBindStorageBuffers ) + && ( maxDescriptorSetUpdateAfterBindStorageBuffersDynamic == rhs.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic ) + && ( maxDescriptorSetUpdateAfterBindSampledImages == rhs.maxDescriptorSetUpdateAfterBindSampledImages ) + && ( maxDescriptorSetUpdateAfterBindStorageImages == rhs.maxDescriptorSetUpdateAfterBindStorageImages ) + && ( maxDescriptorSetUpdateAfterBindInputAttachments == rhs.maxDescriptorSetUpdateAfterBindInputAttachments ); + } + + bool operator!=( PhysicalDeviceDescriptorIndexingPropertiesEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePhysicalDeviceDescriptorIndexingPropertiesEXT; + + public: + void* pNext = nullptr; + uint32_t maxUpdateAfterBindDescriptorsInAllPools; + Bool32 shaderUniformBufferArrayNonUniformIndexingNative; + Bool32 shaderSampledImageArrayNonUniformIndexingNative; + Bool32 shaderStorageBufferArrayNonUniformIndexingNative; + Bool32 shaderStorageImageArrayNonUniformIndexingNative; + Bool32 shaderInputAttachmentArrayNonUniformIndexingNative; + Bool32 
robustBufferAccessUpdateAfterBind; + Bool32 quadDivergentImplicitLod; + uint32_t maxPerStageDescriptorUpdateAfterBindSamplers; + uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers; + uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers; + uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages; + uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages; + uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments; + uint32_t maxPerStageUpdateAfterBindResources; + uint32_t maxDescriptorSetUpdateAfterBindSamplers; + uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers; + uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic; + uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers; + uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic; + uint32_t maxDescriptorSetUpdateAfterBindSampledImages; + uint32_t maxDescriptorSetUpdateAfterBindStorageImages; + uint32_t maxDescriptorSetUpdateAfterBindInputAttachments; + }; + static_assert( sizeof( PhysicalDeviceDescriptorIndexingPropertiesEXT ) == sizeof( VkPhysicalDeviceDescriptorIndexingPropertiesEXT ), "struct and wrapper have different size!" ); + + struct DescriptorSetVariableDescriptorCountAllocateInfoEXT + { + DescriptorSetVariableDescriptorCountAllocateInfoEXT( uint32_t descriptorSetCount_ = 0, + const uint32_t* pDescriptorCounts_ = nullptr ) + : descriptorSetCount( descriptorSetCount_ ) + , pDescriptorCounts( pDescriptorCounts_ ) + { + } + + DescriptorSetVariableDescriptorCountAllocateInfoEXT( VkDescriptorSetVariableDescriptorCountAllocateInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( DescriptorSetVariableDescriptorCountAllocateInfoEXT ) ); + } + + DescriptorSetVariableDescriptorCountAllocateInfoEXT& operator=( VkDescriptorSetVariableDescriptorCountAllocateInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( DescriptorSetVariableDescriptorCountAllocateInfoEXT ) ); + return *this; + } + DescriptorSetVariableDescriptorCountAllocateInfoEXT& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + DescriptorSetVariableDescriptorCountAllocateInfoEXT& setDescriptorSetCount( uint32_t descriptorSetCount_ ) + { + descriptorSetCount = descriptorSetCount_; + return *this; + } + + DescriptorSetVariableDescriptorCountAllocateInfoEXT& setPDescriptorCounts( const uint32_t* pDescriptorCounts_ ) + { + pDescriptorCounts = pDescriptorCounts_; + return *this; + } + + operator VkDescriptorSetVariableDescriptorCountAllocateInfoEXT const&() const + { + return *reinterpret_cast(this); + } + + operator VkDescriptorSetVariableDescriptorCountAllocateInfoEXT &() + { + return *reinterpret_cast(this); + } + + bool operator==( DescriptorSetVariableDescriptorCountAllocateInfoEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( descriptorSetCount == rhs.descriptorSetCount ) + && ( pDescriptorCounts == rhs.pDescriptorCounts ); + } + + bool operator!=( DescriptorSetVariableDescriptorCountAllocateInfoEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eDescriptorSetVariableDescriptorCountAllocateInfoEXT; + + public: + const void* pNext = nullptr; + uint32_t descriptorSetCount; + const uint32_t* pDescriptorCounts; + }; + static_assert( sizeof( DescriptorSetVariableDescriptorCountAllocateInfoEXT ) == sizeof( VkDescriptorSetVariableDescriptorCountAllocateInfoEXT ), "struct and wrapper have different size!" 
); + + struct DescriptorSetVariableDescriptorCountLayoutSupportEXT + { + operator VkDescriptorSetVariableDescriptorCountLayoutSupportEXT const&() const + { + return *reinterpret_cast(this); + } + + operator VkDescriptorSetVariableDescriptorCountLayoutSupportEXT &() + { + return *reinterpret_cast(this); + } + + bool operator==( DescriptorSetVariableDescriptorCountLayoutSupportEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( maxVariableDescriptorCount == rhs.maxVariableDescriptorCount ); + } + + bool operator!=( DescriptorSetVariableDescriptorCountLayoutSupportEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eDescriptorSetVariableDescriptorCountLayoutSupportEXT; + + public: + void* pNext = nullptr; + uint32_t maxVariableDescriptorCount; + }; + static_assert( sizeof( DescriptorSetVariableDescriptorCountLayoutSupportEXT ) == sizeof( VkDescriptorSetVariableDescriptorCountLayoutSupportEXT ), "struct and wrapper have different size!" ); + + struct SubpassEndInfoKHR + { + SubpassEndInfoKHR( ) + { + } + + SubpassEndInfoKHR( VkSubpassEndInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( SubpassEndInfoKHR ) ); + } + + SubpassEndInfoKHR& operator=( VkSubpassEndInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( SubpassEndInfoKHR ) ); + return *this; + } + SubpassEndInfoKHR& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + operator VkSubpassEndInfoKHR const&() const + { + return *reinterpret_cast(this); + } + + operator VkSubpassEndInfoKHR &() + { + return *reinterpret_cast(this); + } + + bool operator==( SubpassEndInfoKHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ); + } + + bool operator!=( SubpassEndInfoKHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eSubpassEndInfoKHR; + + public: + const void* pNext = nullptr; + }; + static_assert( sizeof( SubpassEndInfoKHR ) == sizeof( VkSubpassEndInfoKHR ), "struct and wrapper have different size!" 
); + + struct PipelineVertexInputDivisorStateCreateInfoEXT + { + PipelineVertexInputDivisorStateCreateInfoEXT( uint32_t vertexBindingDivisorCount_ = 0, + const VertexInputBindingDivisorDescriptionEXT* pVertexBindingDivisors_ = nullptr ) + : vertexBindingDivisorCount( vertexBindingDivisorCount_ ) + , pVertexBindingDivisors( pVertexBindingDivisors_ ) + { + } + + PipelineVertexInputDivisorStateCreateInfoEXT( VkPipelineVertexInputDivisorStateCreateInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineVertexInputDivisorStateCreateInfoEXT ) ); + } + + PipelineVertexInputDivisorStateCreateInfoEXT& operator=( VkPipelineVertexInputDivisorStateCreateInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineVertexInputDivisorStateCreateInfoEXT ) ); + return *this; + } + PipelineVertexInputDivisorStateCreateInfoEXT& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PipelineVertexInputDivisorStateCreateInfoEXT& setVertexBindingDivisorCount( uint32_t vertexBindingDivisorCount_ ) + { + vertexBindingDivisorCount = vertexBindingDivisorCount_; + return *this; + } + + PipelineVertexInputDivisorStateCreateInfoEXT& setPVertexBindingDivisors( const VertexInputBindingDivisorDescriptionEXT* pVertexBindingDivisors_ ) + { + pVertexBindingDivisors = pVertexBindingDivisors_; + return *this; + } + + operator VkPipelineVertexInputDivisorStateCreateInfoEXT const&() const + { + return *reinterpret_cast(this); + } + + operator VkPipelineVertexInputDivisorStateCreateInfoEXT &() + { + return *reinterpret_cast(this); + } + + bool operator==( PipelineVertexInputDivisorStateCreateInfoEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( vertexBindingDivisorCount == rhs.vertexBindingDivisorCount ) + && ( pVertexBindingDivisors == rhs.pVertexBindingDivisors ); + } + + bool operator!=( PipelineVertexInputDivisorStateCreateInfoEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePipelineVertexInputDivisorStateCreateInfoEXT; + + public: + const void* pNext = nullptr; + uint32_t vertexBindingDivisorCount; + const VertexInputBindingDivisorDescriptionEXT* pVertexBindingDivisors; + }; + static_assert( sizeof( PipelineVertexInputDivisorStateCreateInfoEXT ) == sizeof( VkPipelineVertexInputDivisorStateCreateInfoEXT ), "struct and wrapper have different size!" 
); + + struct PhysicalDeviceVertexAttributeDivisorPropertiesEXT + { + PhysicalDeviceVertexAttributeDivisorPropertiesEXT( uint32_t maxVertexAttribDivisor_ = 0 ) + : maxVertexAttribDivisor( maxVertexAttribDivisor_ ) + { + } + + PhysicalDeviceVertexAttributeDivisorPropertiesEXT( VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceVertexAttributeDivisorPropertiesEXT ) ); + } + + PhysicalDeviceVertexAttributeDivisorPropertiesEXT& operator=( VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceVertexAttributeDivisorPropertiesEXT ) ); + return *this; + } + PhysicalDeviceVertexAttributeDivisorPropertiesEXT& setPNext( void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceVertexAttributeDivisorPropertiesEXT& setMaxVertexAttribDivisor( uint32_t maxVertexAttribDivisor_ ) + { + maxVertexAttribDivisor = maxVertexAttribDivisor_; + return *this; + } + + operator VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const&() const + { + return *reinterpret_cast(this); + } + + operator VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT &() + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( maxVertexAttribDivisor == rhs.maxVertexAttribDivisor ); + } + + bool operator!=( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesEXT; + + public: + void* pNext = nullptr; + uint32_t maxVertexAttribDivisor; + }; + static_assert( sizeof( PhysicalDeviceVertexAttributeDivisorPropertiesEXT ) == sizeof( VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT ), "struct and wrapper have different size!" ); + + struct PhysicalDevicePCIBusInfoPropertiesEXT + { + operator VkPhysicalDevicePCIBusInfoPropertiesEXT const&() const + { + return *reinterpret_cast(this); + } + + operator VkPhysicalDevicePCIBusInfoPropertiesEXT &() + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDevicePCIBusInfoPropertiesEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( pciDomain == rhs.pciDomain ) + && ( pciBus == rhs.pciBus ) + && ( pciDevice == rhs.pciDevice ) + && ( pciFunction == rhs.pciFunction ); + } + + bool operator!=( PhysicalDevicePCIBusInfoPropertiesEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePhysicalDevicePciBusInfoPropertiesEXT; + + public: + void* pNext = nullptr; + uint16_t pciDomain; + uint8_t pciBus; + uint8_t pciDevice; + uint8_t pciFunction; + }; + static_assert( sizeof( PhysicalDevicePCIBusInfoPropertiesEXT ) == sizeof( VkPhysicalDevicePCIBusInfoPropertiesEXT ), "struct and wrapper have different size!" 
); + +#ifdef VK_USE_PLATFORM_ANDROID_ANDROID + struct ImportAndroidHardwareBufferInfoANDROID + { + ImportAndroidHardwareBufferInfoANDROID( struct AHardwareBuffer* buffer_ = nullptr ) + : buffer( buffer_ ) + { + } + + ImportAndroidHardwareBufferInfoANDROID( VkImportAndroidHardwareBufferInfoANDROID const & rhs ) + { + memcpy( this, &rhs, sizeof( ImportAndroidHardwareBufferInfoANDROID ) ); + } + + ImportAndroidHardwareBufferInfoANDROID& operator=( VkImportAndroidHardwareBufferInfoANDROID const & rhs ) + { + memcpy( this, &rhs, sizeof( ImportAndroidHardwareBufferInfoANDROID ) ); + return *this; + } + ImportAndroidHardwareBufferInfoANDROID& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + ImportAndroidHardwareBufferInfoANDROID& setBuffer( struct AHardwareBuffer* buffer_ ) + { + buffer = buffer_; + return *this; + } + + operator VkImportAndroidHardwareBufferInfoANDROID const&() const + { + return *reinterpret_cast(this); + } + + operator VkImportAndroidHardwareBufferInfoANDROID &() + { + return *reinterpret_cast(this); + } + + bool operator==( ImportAndroidHardwareBufferInfoANDROID const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( buffer == rhs.buffer ); + } + + bool operator!=( ImportAndroidHardwareBufferInfoANDROID const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eImportAndroidHardwareBufferInfoANDROID; + + public: + const void* pNext = nullptr; + struct AHardwareBuffer* buffer; + }; + static_assert( sizeof( ImportAndroidHardwareBufferInfoANDROID ) == sizeof( VkImportAndroidHardwareBufferInfoANDROID ), "struct and wrapper have different size!" ); +#endif /*VK_USE_PLATFORM_ANDROID_ANDROID*/ + +#ifdef VK_USE_PLATFORM_ANDROID_ANDROID + struct AndroidHardwareBufferUsageANDROID + { + operator VkAndroidHardwareBufferUsageANDROID const&() const + { + return *reinterpret_cast(this); + } + + operator VkAndroidHardwareBufferUsageANDROID &() + { + return *reinterpret_cast(this); + } + + bool operator==( AndroidHardwareBufferUsageANDROID const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( androidHardwareBufferUsage == rhs.androidHardwareBufferUsage ); + } + + bool operator!=( AndroidHardwareBufferUsageANDROID const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eAndroidHardwareBufferUsageANDROID; + + public: + void* pNext = nullptr; + uint64_t androidHardwareBufferUsage; + }; + static_assert( sizeof( AndroidHardwareBufferUsageANDROID ) == sizeof( VkAndroidHardwareBufferUsageANDROID ), "struct and wrapper have different size!" 
); +#endif /*VK_USE_PLATFORM_ANDROID_ANDROID*/ + +#ifdef VK_USE_PLATFORM_ANDROID_ANDROID + struct AndroidHardwareBufferPropertiesANDROID + { + operator VkAndroidHardwareBufferPropertiesANDROID const&() const + { + return *reinterpret_cast(this); + } + + operator VkAndroidHardwareBufferPropertiesANDROID &() + { + return *reinterpret_cast(this); + } + + bool operator==( AndroidHardwareBufferPropertiesANDROID const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( allocationSize == rhs.allocationSize ) + && ( memoryTypeBits == rhs.memoryTypeBits ); + } + + bool operator!=( AndroidHardwareBufferPropertiesANDROID const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eAndroidHardwareBufferPropertiesANDROID; + + public: + void* pNext = nullptr; + DeviceSize allocationSize; + uint32_t memoryTypeBits; + }; + static_assert( sizeof( AndroidHardwareBufferPropertiesANDROID ) == sizeof( VkAndroidHardwareBufferPropertiesANDROID ), "struct and wrapper have different size!" ); +#endif /*VK_USE_PLATFORM_ANDROID_ANDROID*/ + +#ifdef VK_USE_PLATFORM_ANDROID_ANDROID + struct MemoryGetAndroidHardwareBufferInfoANDROID + { + MemoryGetAndroidHardwareBufferInfoANDROID( DeviceMemory memory_ = DeviceMemory() ) + : memory( memory_ ) + { + } + + MemoryGetAndroidHardwareBufferInfoANDROID( VkMemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) + { + memcpy( this, &rhs, sizeof( MemoryGetAndroidHardwareBufferInfoANDROID ) ); + } + + MemoryGetAndroidHardwareBufferInfoANDROID& operator=( VkMemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) + { + memcpy( this, &rhs, sizeof( MemoryGetAndroidHardwareBufferInfoANDROID ) ); + return *this; + } + MemoryGetAndroidHardwareBufferInfoANDROID& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + MemoryGetAndroidHardwareBufferInfoANDROID& setMemory( DeviceMemory memory_ ) + { + memory = memory_; + return *this; + } + + operator VkMemoryGetAndroidHardwareBufferInfoANDROID const&() const + { + return *reinterpret_cast(this); + } + + operator VkMemoryGetAndroidHardwareBufferInfoANDROID &() + { + return *reinterpret_cast(this); + } + + bool operator==( MemoryGetAndroidHardwareBufferInfoANDROID const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( memory == rhs.memory ); + } + + bool operator!=( MemoryGetAndroidHardwareBufferInfoANDROID const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eMemoryGetAndroidHardwareBufferInfoANDROID; + + public: + const void* pNext = nullptr; + DeviceMemory memory; + }; + static_assert( sizeof( MemoryGetAndroidHardwareBufferInfoANDROID ) == sizeof( VkMemoryGetAndroidHardwareBufferInfoANDROID ), "struct and wrapper have different size!" 
); +#endif /*VK_USE_PLATFORM_ANDROID_ANDROID*/ + + struct CommandBufferInheritanceConditionalRenderingInfoEXT + { + CommandBufferInheritanceConditionalRenderingInfoEXT( Bool32 conditionalRenderingEnable_ = 0 ) + : conditionalRenderingEnable( conditionalRenderingEnable_ ) + { + } + + CommandBufferInheritanceConditionalRenderingInfoEXT( VkCommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( CommandBufferInheritanceConditionalRenderingInfoEXT ) ); + } + + CommandBufferInheritanceConditionalRenderingInfoEXT& operator=( VkCommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( CommandBufferInheritanceConditionalRenderingInfoEXT ) ); + return *this; + } + CommandBufferInheritanceConditionalRenderingInfoEXT& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + CommandBufferInheritanceConditionalRenderingInfoEXT& setConditionalRenderingEnable( Bool32 conditionalRenderingEnable_ ) + { + conditionalRenderingEnable = conditionalRenderingEnable_; + return *this; + } + + operator VkCommandBufferInheritanceConditionalRenderingInfoEXT const&() const + { + return *reinterpret_cast(this); + } + + operator VkCommandBufferInheritanceConditionalRenderingInfoEXT &() + { + return *reinterpret_cast(this); + } + + bool operator==( CommandBufferInheritanceConditionalRenderingInfoEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( conditionalRenderingEnable == rhs.conditionalRenderingEnable ); + } + + bool operator!=( CommandBufferInheritanceConditionalRenderingInfoEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eCommandBufferInheritanceConditionalRenderingInfoEXT; + + public: + const void* pNext = nullptr; + Bool32 conditionalRenderingEnable; + }; + static_assert( sizeof( CommandBufferInheritanceConditionalRenderingInfoEXT ) == sizeof( VkCommandBufferInheritanceConditionalRenderingInfoEXT ), "struct and wrapper have different size!" ); + +#ifdef VK_USE_PLATFORM_ANDROID_ANDROID + struct ExternalFormatANDROID + { + ExternalFormatANDROID( uint64_t externalFormat_ = 0 ) + : externalFormat( externalFormat_ ) + { + } + + ExternalFormatANDROID( VkExternalFormatANDROID const & rhs ) + { + memcpy( this, &rhs, sizeof( ExternalFormatANDROID ) ); + } + + ExternalFormatANDROID& operator=( VkExternalFormatANDROID const & rhs ) + { + memcpy( this, &rhs, sizeof( ExternalFormatANDROID ) ); + return *this; + } + ExternalFormatANDROID& setPNext( void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + ExternalFormatANDROID& setExternalFormat( uint64_t externalFormat_ ) + { + externalFormat = externalFormat_; + return *this; + } + + operator VkExternalFormatANDROID const&() const + { + return *reinterpret_cast(this); + } + + operator VkExternalFormatANDROID &() + { + return *reinterpret_cast(this); + } + + bool operator==( ExternalFormatANDROID const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( externalFormat == rhs.externalFormat ); + } + + bool operator!=( ExternalFormatANDROID const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eExternalFormatANDROID; + + public: + void* pNext = nullptr; + uint64_t externalFormat; + }; + static_assert( sizeof( ExternalFormatANDROID ) == sizeof( VkExternalFormatANDROID ), "struct and wrapper have different size!" 
); +#endif /*VK_USE_PLATFORM_ANDROID_ANDROID*/ + + struct PhysicalDevice8BitStorageFeaturesKHR + { + PhysicalDevice8BitStorageFeaturesKHR( Bool32 storageBuffer8BitAccess_ = 0, + Bool32 uniformAndStorageBuffer8BitAccess_ = 0, + Bool32 storagePushConstant8_ = 0 ) + : storageBuffer8BitAccess( storageBuffer8BitAccess_ ) + , uniformAndStorageBuffer8BitAccess( uniformAndStorageBuffer8BitAccess_ ) + , storagePushConstant8( storagePushConstant8_ ) + { + } + + PhysicalDevice8BitStorageFeaturesKHR( VkPhysicalDevice8BitStorageFeaturesKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDevice8BitStorageFeaturesKHR ) ); + } + + PhysicalDevice8BitStorageFeaturesKHR& operator=( VkPhysicalDevice8BitStorageFeaturesKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDevice8BitStorageFeaturesKHR ) ); + return *this; + } + PhysicalDevice8BitStorageFeaturesKHR& setPNext( void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PhysicalDevice8BitStorageFeaturesKHR& setStorageBuffer8BitAccess( Bool32 storageBuffer8BitAccess_ ) + { + storageBuffer8BitAccess = storageBuffer8BitAccess_; + return *this; + } + + PhysicalDevice8BitStorageFeaturesKHR& setUniformAndStorageBuffer8BitAccess( Bool32 uniformAndStorageBuffer8BitAccess_ ) + { + uniformAndStorageBuffer8BitAccess = uniformAndStorageBuffer8BitAccess_; + return *this; + } + + PhysicalDevice8BitStorageFeaturesKHR& setStoragePushConstant8( Bool32 storagePushConstant8_ ) + { + storagePushConstant8 = storagePushConstant8_; + return *this; + } + + operator VkPhysicalDevice8BitStorageFeaturesKHR const&() const + { + return *reinterpret_cast(this); + } + + operator VkPhysicalDevice8BitStorageFeaturesKHR &() + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDevice8BitStorageFeaturesKHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( storageBuffer8BitAccess == rhs.storageBuffer8BitAccess ) + && ( uniformAndStorageBuffer8BitAccess == rhs.uniformAndStorageBuffer8BitAccess ) + && ( storagePushConstant8 == rhs.storagePushConstant8 ); + } + + bool operator!=( PhysicalDevice8BitStorageFeaturesKHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePhysicalDevice8BitStorageFeaturesKHR; + + public: + void* pNext = nullptr; + Bool32 storageBuffer8BitAccess; + Bool32 uniformAndStorageBuffer8BitAccess; + Bool32 storagePushConstant8; + }; + static_assert( sizeof( PhysicalDevice8BitStorageFeaturesKHR ) == sizeof( VkPhysicalDevice8BitStorageFeaturesKHR ), "struct and wrapper have different size!" 
); + + struct PhysicalDeviceConditionalRenderingFeaturesEXT + { + PhysicalDeviceConditionalRenderingFeaturesEXT( Bool32 conditionalRendering_ = 0, + Bool32 inheritedConditionalRendering_ = 0 ) + : conditionalRendering( conditionalRendering_ ) + , inheritedConditionalRendering( inheritedConditionalRendering_ ) + { + } + + PhysicalDeviceConditionalRenderingFeaturesEXT( VkPhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceConditionalRenderingFeaturesEXT ) ); + } + + PhysicalDeviceConditionalRenderingFeaturesEXT& operator=( VkPhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceConditionalRenderingFeaturesEXT ) ); + return *this; + } + PhysicalDeviceConditionalRenderingFeaturesEXT& setPNext( void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceConditionalRenderingFeaturesEXT& setConditionalRendering( Bool32 conditionalRendering_ ) + { + conditionalRendering = conditionalRendering_; + return *this; + } + + PhysicalDeviceConditionalRenderingFeaturesEXT& setInheritedConditionalRendering( Bool32 inheritedConditionalRendering_ ) + { + inheritedConditionalRendering = inheritedConditionalRendering_; + return *this; + } + + operator VkPhysicalDeviceConditionalRenderingFeaturesEXT const&() const + { + return *reinterpret_cast(this); + } + + operator VkPhysicalDeviceConditionalRenderingFeaturesEXT &() + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDeviceConditionalRenderingFeaturesEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( conditionalRendering == rhs.conditionalRendering ) + && ( inheritedConditionalRendering == rhs.inheritedConditionalRendering ); + } + + bool operator!=( PhysicalDeviceConditionalRenderingFeaturesEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePhysicalDeviceConditionalRenderingFeaturesEXT; + + public: + void* pNext = nullptr; + Bool32 conditionalRendering; + Bool32 inheritedConditionalRendering; + }; + static_assert( sizeof( PhysicalDeviceConditionalRenderingFeaturesEXT ) == sizeof( VkPhysicalDeviceConditionalRenderingFeaturesEXT ), "struct and wrapper have different size!" ); + + struct PhysicalDeviceVulkanMemoryModelFeaturesKHR + { + operator VkPhysicalDeviceVulkanMemoryModelFeaturesKHR const&() const + { + return *reinterpret_cast(this); + } + + operator VkPhysicalDeviceVulkanMemoryModelFeaturesKHR &() + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDeviceVulkanMemoryModelFeaturesKHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( vulkanMemoryModel == rhs.vulkanMemoryModel ) + && ( vulkanMemoryModelDeviceScope == rhs.vulkanMemoryModelDeviceScope ); + } + + bool operator!=( PhysicalDeviceVulkanMemoryModelFeaturesKHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePhysicalDeviceVulkanMemoryModelFeaturesKHR; + + public: + void* pNext = nullptr; + Bool32 vulkanMemoryModel; + Bool32 vulkanMemoryModelDeviceScope; + }; + static_assert( sizeof( PhysicalDeviceVulkanMemoryModelFeaturesKHR ) == sizeof( VkPhysicalDeviceVulkanMemoryModelFeaturesKHR ), "struct and wrapper have different size!" 
); + + struct PhysicalDeviceShaderAtomicInt64FeaturesKHR + { + PhysicalDeviceShaderAtomicInt64FeaturesKHR( Bool32 shaderBufferInt64Atomics_ = 0, + Bool32 shaderSharedInt64Atomics_ = 0 ) + : shaderBufferInt64Atomics( shaderBufferInt64Atomics_ ) + , shaderSharedInt64Atomics( shaderSharedInt64Atomics_ ) + { + } + + PhysicalDeviceShaderAtomicInt64FeaturesKHR( VkPhysicalDeviceShaderAtomicInt64FeaturesKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceShaderAtomicInt64FeaturesKHR ) ); + } + + PhysicalDeviceShaderAtomicInt64FeaturesKHR& operator=( VkPhysicalDeviceShaderAtomicInt64FeaturesKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceShaderAtomicInt64FeaturesKHR ) ); + return *this; + } + PhysicalDeviceShaderAtomicInt64FeaturesKHR& setPNext( void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceShaderAtomicInt64FeaturesKHR& setShaderBufferInt64Atomics( Bool32 shaderBufferInt64Atomics_ ) + { + shaderBufferInt64Atomics = shaderBufferInt64Atomics_; + return *this; + } + + PhysicalDeviceShaderAtomicInt64FeaturesKHR& setShaderSharedInt64Atomics( Bool32 shaderSharedInt64Atomics_ ) + { + shaderSharedInt64Atomics = shaderSharedInt64Atomics_; + return *this; + } + + operator VkPhysicalDeviceShaderAtomicInt64FeaturesKHR const&() const + { + return *reinterpret_cast(this); + } + + operator VkPhysicalDeviceShaderAtomicInt64FeaturesKHR &() + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDeviceShaderAtomicInt64FeaturesKHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( shaderBufferInt64Atomics == rhs.shaderBufferInt64Atomics ) + && ( shaderSharedInt64Atomics == rhs.shaderSharedInt64Atomics ); + } + + bool operator!=( PhysicalDeviceShaderAtomicInt64FeaturesKHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePhysicalDeviceShaderAtomicInt64FeaturesKHR; + + public: + void* pNext = nullptr; + Bool32 shaderBufferInt64Atomics; + Bool32 shaderSharedInt64Atomics; + }; + static_assert( sizeof( PhysicalDeviceShaderAtomicInt64FeaturesKHR ) == sizeof( VkPhysicalDeviceShaderAtomicInt64FeaturesKHR ), "struct and wrapper have different size!" 
); + + struct PhysicalDeviceVertexAttributeDivisorFeaturesEXT + { + PhysicalDeviceVertexAttributeDivisorFeaturesEXT( Bool32 vertexAttributeInstanceRateDivisor_ = 0, + Bool32 vertexAttributeInstanceRateZeroDivisor_ = 0 ) + : vertexAttributeInstanceRateDivisor( vertexAttributeInstanceRateDivisor_ ) + , vertexAttributeInstanceRateZeroDivisor( vertexAttributeInstanceRateZeroDivisor_ ) + { + } + + PhysicalDeviceVertexAttributeDivisorFeaturesEXT( VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceVertexAttributeDivisorFeaturesEXT ) ); + } + + PhysicalDeviceVertexAttributeDivisorFeaturesEXT& operator=( VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceVertexAttributeDivisorFeaturesEXT ) ); + return *this; + } + PhysicalDeviceVertexAttributeDivisorFeaturesEXT& setPNext( void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceVertexAttributeDivisorFeaturesEXT& setVertexAttributeInstanceRateDivisor( Bool32 vertexAttributeInstanceRateDivisor_ ) + { + vertexAttributeInstanceRateDivisor = vertexAttributeInstanceRateDivisor_; + return *this; + } + + PhysicalDeviceVertexAttributeDivisorFeaturesEXT& setVertexAttributeInstanceRateZeroDivisor( Bool32 vertexAttributeInstanceRateZeroDivisor_ ) + { + vertexAttributeInstanceRateZeroDivisor = vertexAttributeInstanceRateZeroDivisor_; + return *this; + } + + operator VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT const&() const + { + return *reinterpret_cast(this); + } + + operator VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT &() + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDeviceVertexAttributeDivisorFeaturesEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( vertexAttributeInstanceRateDivisor == rhs.vertexAttributeInstanceRateDivisor ) + && ( vertexAttributeInstanceRateZeroDivisor == rhs.vertexAttributeInstanceRateZeroDivisor ); + } + + bool operator!=( PhysicalDeviceVertexAttributeDivisorFeaturesEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePhysicalDeviceVertexAttributeDivisorFeaturesEXT; + + public: + void* pNext = nullptr; + Bool32 vertexAttributeInstanceRateDivisor; + Bool32 vertexAttributeInstanceRateZeroDivisor; + }; + static_assert( sizeof( PhysicalDeviceVertexAttributeDivisorFeaturesEXT ) == sizeof( VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT ), "struct and wrapper have different size!" 
); + + struct ImageViewASTCDecodeModeEXT + { + ImageViewASTCDecodeModeEXT( Format decodeMode_ = Format::eUndefined ) + : decodeMode( decodeMode_ ) + { + } + + ImageViewASTCDecodeModeEXT( VkImageViewASTCDecodeModeEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( ImageViewASTCDecodeModeEXT ) ); + } + + ImageViewASTCDecodeModeEXT& operator=( VkImageViewASTCDecodeModeEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( ImageViewASTCDecodeModeEXT ) ); + return *this; + } + ImageViewASTCDecodeModeEXT& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + ImageViewASTCDecodeModeEXT& setDecodeMode( Format decodeMode_ ) + { + decodeMode = decodeMode_; + return *this; + } + + operator VkImageViewASTCDecodeModeEXT const&() const + { + return *reinterpret_cast(this); + } + + operator VkImageViewASTCDecodeModeEXT &() + { + return *reinterpret_cast(this); + } + + bool operator==( ImageViewASTCDecodeModeEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( decodeMode == rhs.decodeMode ); + } + + bool operator!=( ImageViewASTCDecodeModeEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eImageViewAstcDecodeModeEXT; + + public: + const void* pNext = nullptr; + Format decodeMode; + }; + static_assert( sizeof( ImageViewASTCDecodeModeEXT ) == sizeof( VkImageViewASTCDecodeModeEXT ), "struct and wrapper have different size!" ); + + struct PhysicalDeviceASTCDecodeFeaturesEXT + { + PhysicalDeviceASTCDecodeFeaturesEXT( Bool32 decodeModeSharedExponent_ = 0 ) + : decodeModeSharedExponent( decodeModeSharedExponent_ ) + { + } + + PhysicalDeviceASTCDecodeFeaturesEXT( VkPhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceASTCDecodeFeaturesEXT ) ); + } + + PhysicalDeviceASTCDecodeFeaturesEXT& operator=( VkPhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceASTCDecodeFeaturesEXT ) ); + return *this; + } + PhysicalDeviceASTCDecodeFeaturesEXT& setPNext( void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceASTCDecodeFeaturesEXT& setDecodeModeSharedExponent( Bool32 decodeModeSharedExponent_ ) + { + decodeModeSharedExponent = decodeModeSharedExponent_; + return *this; + } + + operator VkPhysicalDeviceASTCDecodeFeaturesEXT const&() const + { + return *reinterpret_cast(this); + } + + operator VkPhysicalDeviceASTCDecodeFeaturesEXT &() + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDeviceASTCDecodeFeaturesEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( decodeModeSharedExponent == rhs.decodeModeSharedExponent ); + } + + bool operator!=( PhysicalDeviceASTCDecodeFeaturesEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePhysicalDeviceAstcDecodeFeaturesEXT; + + public: + void* pNext = nullptr; + Bool32 decodeModeSharedExponent; + }; + static_assert( sizeof( PhysicalDeviceASTCDecodeFeaturesEXT ) == sizeof( VkPhysicalDeviceASTCDecodeFeaturesEXT ), "struct and wrapper have different size!" 
); + + struct PhysicalDeviceTransformFeedbackFeaturesEXT + { + PhysicalDeviceTransformFeedbackFeaturesEXT( Bool32 transformFeedback_ = 0, + Bool32 geometryStreams_ = 0 ) + : transformFeedback( transformFeedback_ ) + , geometryStreams( geometryStreams_ ) + { + } + + PhysicalDeviceTransformFeedbackFeaturesEXT( VkPhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceTransformFeedbackFeaturesEXT ) ); + } + + PhysicalDeviceTransformFeedbackFeaturesEXT& operator=( VkPhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceTransformFeedbackFeaturesEXT ) ); + return *this; + } + PhysicalDeviceTransformFeedbackFeaturesEXT& setPNext( void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceTransformFeedbackFeaturesEXT& setTransformFeedback( Bool32 transformFeedback_ ) + { + transformFeedback = transformFeedback_; + return *this; + } + + PhysicalDeviceTransformFeedbackFeaturesEXT& setGeometryStreams( Bool32 geometryStreams_ ) + { + geometryStreams = geometryStreams_; + return *this; + } + + operator VkPhysicalDeviceTransformFeedbackFeaturesEXT const&() const + { + return *reinterpret_cast(this); + } + + operator VkPhysicalDeviceTransformFeedbackFeaturesEXT &() + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDeviceTransformFeedbackFeaturesEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( transformFeedback == rhs.transformFeedback ) + && ( geometryStreams == rhs.geometryStreams ); + } + + bool operator!=( PhysicalDeviceTransformFeedbackFeaturesEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePhysicalDeviceTransformFeedbackFeaturesEXT; + + public: + void* pNext = nullptr; + Bool32 transformFeedback; + Bool32 geometryStreams; + }; + static_assert( sizeof( PhysicalDeviceTransformFeedbackFeaturesEXT ) == sizeof( VkPhysicalDeviceTransformFeedbackFeaturesEXT ), "struct and wrapper have different size!" 
); + + struct PhysicalDeviceTransformFeedbackPropertiesEXT + { + operator VkPhysicalDeviceTransformFeedbackPropertiesEXT const&() const + { + return *reinterpret_cast(this); + } + + operator VkPhysicalDeviceTransformFeedbackPropertiesEXT &() + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDeviceTransformFeedbackPropertiesEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( maxTransformFeedbackStreams == rhs.maxTransformFeedbackStreams ) + && ( maxTransformFeedbackBuffers == rhs.maxTransformFeedbackBuffers ) + && ( maxTransformFeedbackBufferSize == rhs.maxTransformFeedbackBufferSize ) + && ( maxTransformFeedbackStreamDataSize == rhs.maxTransformFeedbackStreamDataSize ) + && ( maxTransformFeedbackBufferDataSize == rhs.maxTransformFeedbackBufferDataSize ) + && ( maxTransformFeedbackBufferDataStride == rhs.maxTransformFeedbackBufferDataStride ) + && ( transformFeedbackQueries == rhs.transformFeedbackQueries ) + && ( transformFeedbackStreamsLinesTriangles == rhs.transformFeedbackStreamsLinesTriangles ) + && ( transformFeedbackRasterizationStreamSelect == rhs.transformFeedbackRasterizationStreamSelect ) + && ( transformFeedbackDraw == rhs.transformFeedbackDraw ); + } + + bool operator!=( PhysicalDeviceTransformFeedbackPropertiesEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePhysicalDeviceTransformFeedbackPropertiesEXT; + + public: + void* pNext = nullptr; + uint32_t maxTransformFeedbackStreams; + uint32_t maxTransformFeedbackBuffers; + DeviceSize maxTransformFeedbackBufferSize; + uint32_t maxTransformFeedbackStreamDataSize; + uint32_t maxTransformFeedbackBufferDataSize; + uint32_t maxTransformFeedbackBufferDataStride; + Bool32 transformFeedbackQueries; + Bool32 transformFeedbackStreamsLinesTriangles; + Bool32 transformFeedbackRasterizationStreamSelect; + Bool32 transformFeedbackDraw; + }; + static_assert( sizeof( PhysicalDeviceTransformFeedbackPropertiesEXT ) == sizeof( VkPhysicalDeviceTransformFeedbackPropertiesEXT ), "struct and wrapper have different size!" 
); + + struct PipelineRasterizationStateStreamCreateInfoEXT + { + PipelineRasterizationStateStreamCreateInfoEXT( PipelineRasterizationStateStreamCreateFlagsEXT flags_ = PipelineRasterizationStateStreamCreateFlagsEXT(), + uint32_t rasterizationStream_ = 0 ) + : flags( flags_ ) + , rasterizationStream( rasterizationStream_ ) + { + } + + PipelineRasterizationStateStreamCreateInfoEXT( VkPipelineRasterizationStateStreamCreateInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineRasterizationStateStreamCreateInfoEXT ) ); + } + + PipelineRasterizationStateStreamCreateInfoEXT& operator=( VkPipelineRasterizationStateStreamCreateInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineRasterizationStateStreamCreateInfoEXT ) ); + return *this; + } + PipelineRasterizationStateStreamCreateInfoEXT& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PipelineRasterizationStateStreamCreateInfoEXT& setFlags( PipelineRasterizationStateStreamCreateFlagsEXT flags_ ) + { + flags = flags_; + return *this; + } + + PipelineRasterizationStateStreamCreateInfoEXT& setRasterizationStream( uint32_t rasterizationStream_ ) + { + rasterizationStream = rasterizationStream_; + return *this; + } + + operator VkPipelineRasterizationStateStreamCreateInfoEXT const&() const + { + return *reinterpret_cast(this); + } + + operator VkPipelineRasterizationStateStreamCreateInfoEXT &() + { + return *reinterpret_cast(this); + } + + bool operator==( PipelineRasterizationStateStreamCreateInfoEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( rasterizationStream == rhs.rasterizationStream ); + } + + bool operator!=( PipelineRasterizationStateStreamCreateInfoEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePipelineRasterizationStateStreamCreateInfoEXT; + + public: + const void* pNext = nullptr; + PipelineRasterizationStateStreamCreateFlagsEXT flags; + uint32_t rasterizationStream; + }; + static_assert( sizeof( PipelineRasterizationStateStreamCreateInfoEXT ) == sizeof( VkPipelineRasterizationStateStreamCreateInfoEXT ), "struct and wrapper have different size!" 
); + + struct PhysicalDeviceRepresentativeFragmentTestFeaturesNV + { + PhysicalDeviceRepresentativeFragmentTestFeaturesNV( Bool32 representativeFragmentTest_ = 0 ) + : representativeFragmentTest( representativeFragmentTest_ ) + { + } + + PhysicalDeviceRepresentativeFragmentTestFeaturesNV( VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceRepresentativeFragmentTestFeaturesNV ) ); + } + + PhysicalDeviceRepresentativeFragmentTestFeaturesNV& operator=( VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceRepresentativeFragmentTestFeaturesNV ) ); + return *this; + } + PhysicalDeviceRepresentativeFragmentTestFeaturesNV& setPNext( void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceRepresentativeFragmentTestFeaturesNV& setRepresentativeFragmentTest( Bool32 representativeFragmentTest_ ) + { + representativeFragmentTest = representativeFragmentTest_; + return *this; + } + + operator VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV const&() const + { + return *reinterpret_cast(this); + } + + operator VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV &() + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( representativeFragmentTest == rhs.representativeFragmentTest ); + } + + bool operator!=( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePhysicalDeviceRepresentativeFragmentTestFeaturesNV; + + public: + void* pNext = nullptr; + Bool32 representativeFragmentTest; + }; + static_assert( sizeof( PhysicalDeviceRepresentativeFragmentTestFeaturesNV ) == sizeof( VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV ), "struct and wrapper have different size!" 
); + + struct PipelineRepresentativeFragmentTestStateCreateInfoNV + { + PipelineRepresentativeFragmentTestStateCreateInfoNV( Bool32 representativeFragmentTestEnable_ = 0 ) + : representativeFragmentTestEnable( representativeFragmentTestEnable_ ) + { + } + + PipelineRepresentativeFragmentTestStateCreateInfoNV( VkPipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineRepresentativeFragmentTestStateCreateInfoNV ) ); + } + + PipelineRepresentativeFragmentTestStateCreateInfoNV& operator=( VkPipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineRepresentativeFragmentTestStateCreateInfoNV ) ); + return *this; + } + PipelineRepresentativeFragmentTestStateCreateInfoNV& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PipelineRepresentativeFragmentTestStateCreateInfoNV& setRepresentativeFragmentTestEnable( Bool32 representativeFragmentTestEnable_ ) + { + representativeFragmentTestEnable = representativeFragmentTestEnable_; + return *this; + } + + operator VkPipelineRepresentativeFragmentTestStateCreateInfoNV const&() const + { + return *reinterpret_cast(this); + } + + operator VkPipelineRepresentativeFragmentTestStateCreateInfoNV &() + { + return *reinterpret_cast(this); + } + + bool operator==( PipelineRepresentativeFragmentTestStateCreateInfoNV const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( representativeFragmentTestEnable == rhs.representativeFragmentTestEnable ); + } + + bool operator!=( PipelineRepresentativeFragmentTestStateCreateInfoNV const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePipelineRepresentativeFragmentTestStateCreateInfoNV; + + public: + const void* pNext = nullptr; + Bool32 representativeFragmentTestEnable; + }; + static_assert( sizeof( PipelineRepresentativeFragmentTestStateCreateInfoNV ) == sizeof( VkPipelineRepresentativeFragmentTestStateCreateInfoNV ), "struct and wrapper have different size!" 
); + + struct PhysicalDeviceExclusiveScissorFeaturesNV + { + PhysicalDeviceExclusiveScissorFeaturesNV( Bool32 exclusiveScissor_ = 0 ) + : exclusiveScissor( exclusiveScissor_ ) + { + } + + PhysicalDeviceExclusiveScissorFeaturesNV( VkPhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceExclusiveScissorFeaturesNV ) ); + } + + PhysicalDeviceExclusiveScissorFeaturesNV& operator=( VkPhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceExclusiveScissorFeaturesNV ) ); + return *this; + } + PhysicalDeviceExclusiveScissorFeaturesNV& setPNext( void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceExclusiveScissorFeaturesNV& setExclusiveScissor( Bool32 exclusiveScissor_ ) + { + exclusiveScissor = exclusiveScissor_; + return *this; + } + + operator VkPhysicalDeviceExclusiveScissorFeaturesNV const&() const + { + return *reinterpret_cast(this); + } + + operator VkPhysicalDeviceExclusiveScissorFeaturesNV &() + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDeviceExclusiveScissorFeaturesNV const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( exclusiveScissor == rhs.exclusiveScissor ); + } + + bool operator!=( PhysicalDeviceExclusiveScissorFeaturesNV const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePhysicalDeviceExclusiveScissorFeaturesNV; + + public: + void* pNext = nullptr; + Bool32 exclusiveScissor; + }; + static_assert( sizeof( PhysicalDeviceExclusiveScissorFeaturesNV ) == sizeof( VkPhysicalDeviceExclusiveScissorFeaturesNV ), "struct and wrapper have different size!" ); + + struct PipelineViewportExclusiveScissorStateCreateInfoNV + { + PipelineViewportExclusiveScissorStateCreateInfoNV( uint32_t exclusiveScissorCount_ = 0, + const Rect2D* pExclusiveScissors_ = nullptr ) + : exclusiveScissorCount( exclusiveScissorCount_ ) + , pExclusiveScissors( pExclusiveScissors_ ) + { + } + + PipelineViewportExclusiveScissorStateCreateInfoNV( VkPipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineViewportExclusiveScissorStateCreateInfoNV ) ); + } + + PipelineViewportExclusiveScissorStateCreateInfoNV& operator=( VkPipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineViewportExclusiveScissorStateCreateInfoNV ) ); + return *this; + } + PipelineViewportExclusiveScissorStateCreateInfoNV& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PipelineViewportExclusiveScissorStateCreateInfoNV& setExclusiveScissorCount( uint32_t exclusiveScissorCount_ ) + { + exclusiveScissorCount = exclusiveScissorCount_; + return *this; + } + + PipelineViewportExclusiveScissorStateCreateInfoNV& setPExclusiveScissors( const Rect2D* pExclusiveScissors_ ) + { + pExclusiveScissors = pExclusiveScissors_; + return *this; + } + + operator VkPipelineViewportExclusiveScissorStateCreateInfoNV const&() const + { + return *reinterpret_cast(this); + } + + operator VkPipelineViewportExclusiveScissorStateCreateInfoNV &() + { + return *reinterpret_cast(this); + } + + bool operator==( PipelineViewportExclusiveScissorStateCreateInfoNV const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( exclusiveScissorCount == rhs.exclusiveScissorCount ) + && ( pExclusiveScissors == rhs.pExclusiveScissors ); + } + + bool operator!=( 
PipelineViewportExclusiveScissorStateCreateInfoNV const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePipelineViewportExclusiveScissorStateCreateInfoNV; + + public: + const void* pNext = nullptr; + uint32_t exclusiveScissorCount; + const Rect2D* pExclusiveScissors; + }; + static_assert( sizeof( PipelineViewportExclusiveScissorStateCreateInfoNV ) == sizeof( VkPipelineViewportExclusiveScissorStateCreateInfoNV ), "struct and wrapper have different size!" ); + + struct PhysicalDeviceCornerSampledImageFeaturesNV + { + PhysicalDeviceCornerSampledImageFeaturesNV( Bool32 cornerSampledImage_ = 0 ) + : cornerSampledImage( cornerSampledImage_ ) + { + } + + PhysicalDeviceCornerSampledImageFeaturesNV( VkPhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceCornerSampledImageFeaturesNV ) ); + } + + PhysicalDeviceCornerSampledImageFeaturesNV& operator=( VkPhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceCornerSampledImageFeaturesNV ) ); + return *this; + } + PhysicalDeviceCornerSampledImageFeaturesNV& setPNext( void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceCornerSampledImageFeaturesNV& setCornerSampledImage( Bool32 cornerSampledImage_ ) + { + cornerSampledImage = cornerSampledImage_; + return *this; + } + + operator VkPhysicalDeviceCornerSampledImageFeaturesNV const&() const + { + return *reinterpret_cast(this); + } + + operator VkPhysicalDeviceCornerSampledImageFeaturesNV &() + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDeviceCornerSampledImageFeaturesNV const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( cornerSampledImage == rhs.cornerSampledImage ); + } + + bool operator!=( PhysicalDeviceCornerSampledImageFeaturesNV const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePhysicalDeviceCornerSampledImageFeaturesNV; + + public: + void* pNext = nullptr; + Bool32 cornerSampledImage; + }; + static_assert( sizeof( PhysicalDeviceCornerSampledImageFeaturesNV ) == sizeof( VkPhysicalDeviceCornerSampledImageFeaturesNV ), "struct and wrapper have different size!" 
); + + struct PhysicalDeviceComputeShaderDerivativesFeaturesNV + { + PhysicalDeviceComputeShaderDerivativesFeaturesNV( Bool32 computeDerivativeGroupQuads_ = 0, + Bool32 computeDerivativeGroupLinear_ = 0 ) + : computeDerivativeGroupQuads( computeDerivativeGroupQuads_ ) + , computeDerivativeGroupLinear( computeDerivativeGroupLinear_ ) + { + } + + PhysicalDeviceComputeShaderDerivativesFeaturesNV( VkPhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceComputeShaderDerivativesFeaturesNV ) ); + } + + PhysicalDeviceComputeShaderDerivativesFeaturesNV& operator=( VkPhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceComputeShaderDerivativesFeaturesNV ) ); + return *this; + } + PhysicalDeviceComputeShaderDerivativesFeaturesNV& setPNext( void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceComputeShaderDerivativesFeaturesNV& setComputeDerivativeGroupQuads( Bool32 computeDerivativeGroupQuads_ ) + { + computeDerivativeGroupQuads = computeDerivativeGroupQuads_; + return *this; + } + + PhysicalDeviceComputeShaderDerivativesFeaturesNV& setComputeDerivativeGroupLinear( Bool32 computeDerivativeGroupLinear_ ) + { + computeDerivativeGroupLinear = computeDerivativeGroupLinear_; + return *this; + } + + operator VkPhysicalDeviceComputeShaderDerivativesFeaturesNV const&() const + { + return *reinterpret_cast(this); + } + + operator VkPhysicalDeviceComputeShaderDerivativesFeaturesNV &() + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDeviceComputeShaderDerivativesFeaturesNV const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( computeDerivativeGroupQuads == rhs.computeDerivativeGroupQuads ) + && ( computeDerivativeGroupLinear == rhs.computeDerivativeGroupLinear ); + } + + bool operator!=( PhysicalDeviceComputeShaderDerivativesFeaturesNV const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePhysicalDeviceComputeShaderDerivativesFeaturesNV; + + public: + void* pNext = nullptr; + Bool32 computeDerivativeGroupQuads; + Bool32 computeDerivativeGroupLinear; + }; + static_assert( sizeof( PhysicalDeviceComputeShaderDerivativesFeaturesNV ) == sizeof( VkPhysicalDeviceComputeShaderDerivativesFeaturesNV ), "struct and wrapper have different size!" 
); + + struct PhysicalDeviceFragmentShaderBarycentricFeaturesNV + { + PhysicalDeviceFragmentShaderBarycentricFeaturesNV( Bool32 fragmentShaderBarycentric_ = 0 ) + : fragmentShaderBarycentric( fragmentShaderBarycentric_ ) + { + } + + PhysicalDeviceFragmentShaderBarycentricFeaturesNV( VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceFragmentShaderBarycentricFeaturesNV ) ); + } + + PhysicalDeviceFragmentShaderBarycentricFeaturesNV& operator=( VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceFragmentShaderBarycentricFeaturesNV ) ); + return *this; + } + PhysicalDeviceFragmentShaderBarycentricFeaturesNV& setPNext( void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceFragmentShaderBarycentricFeaturesNV& setFragmentShaderBarycentric( Bool32 fragmentShaderBarycentric_ ) + { + fragmentShaderBarycentric = fragmentShaderBarycentric_; + return *this; + } + + operator VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV const&() const + { + return *reinterpret_cast(this); + } + + operator VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV &() + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDeviceFragmentShaderBarycentricFeaturesNV const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( fragmentShaderBarycentric == rhs.fragmentShaderBarycentric ); + } + + bool operator!=( PhysicalDeviceFragmentShaderBarycentricFeaturesNV const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePhysicalDeviceFragmentShaderBarycentricFeaturesNV; + + public: + void* pNext = nullptr; + Bool32 fragmentShaderBarycentric; + }; + static_assert( sizeof( PhysicalDeviceFragmentShaderBarycentricFeaturesNV ) == sizeof( VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV ), "struct and wrapper have different size!" 
); + + struct PhysicalDeviceShaderImageFootprintFeaturesNV + { + PhysicalDeviceShaderImageFootprintFeaturesNV( Bool32 imageFootprint_ = 0 ) + : imageFootprint( imageFootprint_ ) + { + } + + PhysicalDeviceShaderImageFootprintFeaturesNV( VkPhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceShaderImageFootprintFeaturesNV ) ); + } + + PhysicalDeviceShaderImageFootprintFeaturesNV& operator=( VkPhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceShaderImageFootprintFeaturesNV ) ); + return *this; + } + PhysicalDeviceShaderImageFootprintFeaturesNV& setPNext( void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceShaderImageFootprintFeaturesNV& setImageFootprint( Bool32 imageFootprint_ ) + { + imageFootprint = imageFootprint_; + return *this; + } + + operator VkPhysicalDeviceShaderImageFootprintFeaturesNV const&() const + { + return *reinterpret_cast(this); + } + + operator VkPhysicalDeviceShaderImageFootprintFeaturesNV &() + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDeviceShaderImageFootprintFeaturesNV const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( imageFootprint == rhs.imageFootprint ); + } + + bool operator!=( PhysicalDeviceShaderImageFootprintFeaturesNV const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePhysicalDeviceShaderImageFootprintFeaturesNV; + + public: + void* pNext = nullptr; + Bool32 imageFootprint; + }; + static_assert( sizeof( PhysicalDeviceShaderImageFootprintFeaturesNV ) == sizeof( VkPhysicalDeviceShaderImageFootprintFeaturesNV ), "struct and wrapper have different size!" ); + + struct PhysicalDeviceShadingRateImageFeaturesNV + { + PhysicalDeviceShadingRateImageFeaturesNV( Bool32 shadingRateImage_ = 0, + Bool32 shadingRateCoarseSampleOrder_ = 0 ) + : shadingRateImage( shadingRateImage_ ) + , shadingRateCoarseSampleOrder( shadingRateCoarseSampleOrder_ ) + { + } + + PhysicalDeviceShadingRateImageFeaturesNV( VkPhysicalDeviceShadingRateImageFeaturesNV const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceShadingRateImageFeaturesNV ) ); + } + + PhysicalDeviceShadingRateImageFeaturesNV& operator=( VkPhysicalDeviceShadingRateImageFeaturesNV const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceShadingRateImageFeaturesNV ) ); + return *this; + } + PhysicalDeviceShadingRateImageFeaturesNV& setPNext( void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceShadingRateImageFeaturesNV& setShadingRateImage( Bool32 shadingRateImage_ ) + { + shadingRateImage = shadingRateImage_; + return *this; + } + + PhysicalDeviceShadingRateImageFeaturesNV& setShadingRateCoarseSampleOrder( Bool32 shadingRateCoarseSampleOrder_ ) + { + shadingRateCoarseSampleOrder = shadingRateCoarseSampleOrder_; + return *this; + } + + operator VkPhysicalDeviceShadingRateImageFeaturesNV const&() const + { + return *reinterpret_cast(this); + } + + operator VkPhysicalDeviceShadingRateImageFeaturesNV &() + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDeviceShadingRateImageFeaturesNV const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( shadingRateImage == rhs.shadingRateImage ) + && ( shadingRateCoarseSampleOrder == rhs.shadingRateCoarseSampleOrder ); + } + + bool operator!=( PhysicalDeviceShadingRateImageFeaturesNV const& rhs ) const + { + return !operator==( rhs ); + } + + 
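+ // Illustrative sketch (assumed usage, not from this header): feature structs such as
+ // PhysicalDeviceShadingRateImageFeaturesNV are typically filled in by the application and chained into
+ // DeviceCreateInfo::pNext at device creation, using the fluent setters defined above. Variable names
+ // ("shadingRateFeatures", "deviceCreateInfo") are placeholders.
+ //   vk::PhysicalDeviceShadingRateImageFeaturesNV shadingRateFeatures;
+ //   shadingRateFeatures.setShadingRateImage( VK_TRUE )
+ //                      .setShadingRateCoarseSampleOrder( VK_TRUE );
+ //   vk::DeviceCreateInfo deviceCreateInfo;
+ //   deviceCreateInfo.setPNext( &shadingRateFeatures );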
private: + StructureType sType = StructureType::ePhysicalDeviceShadingRateImageFeaturesNV; + + public: + void* pNext = nullptr; + Bool32 shadingRateImage; + Bool32 shadingRateCoarseSampleOrder; + }; + static_assert( sizeof( PhysicalDeviceShadingRateImageFeaturesNV ) == sizeof( VkPhysicalDeviceShadingRateImageFeaturesNV ), "struct and wrapper have different size!" ); + + struct PhysicalDeviceShadingRateImagePropertiesNV + { + operator VkPhysicalDeviceShadingRateImagePropertiesNV const&() const + { + return *reinterpret_cast(this); + } + + operator VkPhysicalDeviceShadingRateImagePropertiesNV &() + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDeviceShadingRateImagePropertiesNV const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( shadingRateTexelSize == rhs.shadingRateTexelSize ) + && ( shadingRatePaletteSize == rhs.shadingRatePaletteSize ) + && ( shadingRateMaxCoarseSamples == rhs.shadingRateMaxCoarseSamples ); + } + + bool operator!=( PhysicalDeviceShadingRateImagePropertiesNV const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePhysicalDeviceShadingRateImagePropertiesNV; + + public: + void* pNext = nullptr; + Extent2D shadingRateTexelSize; + uint32_t shadingRatePaletteSize; + uint32_t shadingRateMaxCoarseSamples; + }; + static_assert( sizeof( PhysicalDeviceShadingRateImagePropertiesNV ) == sizeof( VkPhysicalDeviceShadingRateImagePropertiesNV ), "struct and wrapper have different size!" ); + + struct PhysicalDeviceMeshShaderFeaturesNV + { + PhysicalDeviceMeshShaderFeaturesNV( Bool32 taskShader_ = 0, + Bool32 meshShader_ = 0 ) + : taskShader( taskShader_ ) + , meshShader( meshShader_ ) + { + } + + PhysicalDeviceMeshShaderFeaturesNV( VkPhysicalDeviceMeshShaderFeaturesNV const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceMeshShaderFeaturesNV ) ); + } + + PhysicalDeviceMeshShaderFeaturesNV& operator=( VkPhysicalDeviceMeshShaderFeaturesNV const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceMeshShaderFeaturesNV ) ); + return *this; + } + PhysicalDeviceMeshShaderFeaturesNV& setPNext( void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceMeshShaderFeaturesNV& setTaskShader( Bool32 taskShader_ ) + { + taskShader = taskShader_; + return *this; + } + + PhysicalDeviceMeshShaderFeaturesNV& setMeshShader( Bool32 meshShader_ ) + { + meshShader = meshShader_; + return *this; + } + + operator VkPhysicalDeviceMeshShaderFeaturesNV const&() const + { + return *reinterpret_cast(this); + } + + operator VkPhysicalDeviceMeshShaderFeaturesNV &() + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDeviceMeshShaderFeaturesNV const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( taskShader == rhs.taskShader ) + && ( meshShader == rhs.meshShader ); + } + + bool operator!=( PhysicalDeviceMeshShaderFeaturesNV const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePhysicalDeviceMeshShaderFeaturesNV; + + public: + void* pNext = nullptr; + Bool32 taskShader; + Bool32 meshShader; + }; + static_assert( sizeof( PhysicalDeviceMeshShaderFeaturesNV ) == sizeof( VkPhysicalDeviceMeshShaderFeaturesNV ), "struct and wrapper have different size!" 
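// --- Illustrative usage sketch (editor's note, not part of the patch) ---------
// The wrapper structs above return *this from their setters, so a pNext chain for
// enabling NV mesh shading can be built fluently. PhysicalDeviceFeatures2 and
// DeviceCreateInfo are generated elsewhere in this header; the structs passed in
// are caller-supplied placeholders.
inline vk::DeviceCreateInfo chainMeshShaderFeatures( vk::PhysicalDeviceMeshShaderFeaturesNV & meshFeatures,
                                                     vk::PhysicalDeviceFeatures2 & features2 )
{
  meshFeatures.setTaskShader( VK_TRUE )
              .setMeshShader( VK_TRUE );
  features2.setPNext( &meshFeatures );                    // hook the NV features into the chain
  return vk::DeviceCreateInfo().setPNext( &features2 );   // pEnabledFeatures stays null when features2 is chained
}
// ------------------------------------------------------------------------------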
); + + struct PhysicalDeviceMeshShaderPropertiesNV + { + PhysicalDeviceMeshShaderPropertiesNV( uint32_t maxDrawMeshTasksCount_ = 0, + uint32_t maxTaskWorkGroupInvocations_ = 0, + std::array const& maxTaskWorkGroupSize_ = { { 0, 0, 0 } }, + uint32_t maxTaskTotalMemorySize_ = 0, + uint32_t maxTaskOutputCount_ = 0, + uint32_t maxMeshWorkGroupInvocations_ = 0, + std::array const& maxMeshWorkGroupSize_ = { { 0, 0, 0 } }, + uint32_t maxMeshTotalMemorySize_ = 0, + uint32_t maxMeshOutputVertices_ = 0, + uint32_t maxMeshOutputPrimitives_ = 0, + uint32_t maxMeshMultiviewViewCount_ = 0, + uint32_t meshOutputPerVertexGranularity_ = 0, + uint32_t meshOutputPerPrimitiveGranularity_ = 0 ) + : maxDrawMeshTasksCount( maxDrawMeshTasksCount_ ) + , maxTaskWorkGroupInvocations( maxTaskWorkGroupInvocations_ ) + , maxTaskTotalMemorySize( maxTaskTotalMemorySize_ ) + , maxTaskOutputCount( maxTaskOutputCount_ ) + , maxMeshWorkGroupInvocations( maxMeshWorkGroupInvocations_ ) + , maxMeshTotalMemorySize( maxMeshTotalMemorySize_ ) + , maxMeshOutputVertices( maxMeshOutputVertices_ ) + , maxMeshOutputPrimitives( maxMeshOutputPrimitives_ ) + , maxMeshMultiviewViewCount( maxMeshMultiviewViewCount_ ) + , meshOutputPerVertexGranularity( meshOutputPerVertexGranularity_ ) + , meshOutputPerPrimitiveGranularity( meshOutputPerPrimitiveGranularity_ ) + { + memcpy( &maxTaskWorkGroupSize, maxTaskWorkGroupSize_.data(), 3 * sizeof( uint32_t ) ); + memcpy( &maxMeshWorkGroupSize, maxMeshWorkGroupSize_.data(), 3 * sizeof( uint32_t ) ); + } + + PhysicalDeviceMeshShaderPropertiesNV( VkPhysicalDeviceMeshShaderPropertiesNV const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceMeshShaderPropertiesNV ) ); + } + + PhysicalDeviceMeshShaderPropertiesNV& operator=( VkPhysicalDeviceMeshShaderPropertiesNV const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceMeshShaderPropertiesNV ) ); + return *this; + } + PhysicalDeviceMeshShaderPropertiesNV& setPNext( void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceMeshShaderPropertiesNV& setMaxDrawMeshTasksCount( uint32_t maxDrawMeshTasksCount_ ) + { + maxDrawMeshTasksCount = maxDrawMeshTasksCount_; + return *this; + } + + PhysicalDeviceMeshShaderPropertiesNV& setMaxTaskWorkGroupInvocations( uint32_t maxTaskWorkGroupInvocations_ ) + { + maxTaskWorkGroupInvocations = maxTaskWorkGroupInvocations_; + return *this; + } + + PhysicalDeviceMeshShaderPropertiesNV& setMaxTaskWorkGroupSize( std::array maxTaskWorkGroupSize_ ) + { + memcpy( &maxTaskWorkGroupSize, maxTaskWorkGroupSize_.data(), 3 * sizeof( uint32_t ) ); + return *this; + } + + PhysicalDeviceMeshShaderPropertiesNV& setMaxTaskTotalMemorySize( uint32_t maxTaskTotalMemorySize_ ) + { + maxTaskTotalMemorySize = maxTaskTotalMemorySize_; + return *this; + } + + PhysicalDeviceMeshShaderPropertiesNV& setMaxTaskOutputCount( uint32_t maxTaskOutputCount_ ) + { + maxTaskOutputCount = maxTaskOutputCount_; + return *this; + } + + PhysicalDeviceMeshShaderPropertiesNV& setMaxMeshWorkGroupInvocations( uint32_t maxMeshWorkGroupInvocations_ ) + { + maxMeshWorkGroupInvocations = maxMeshWorkGroupInvocations_; + return *this; + } + + PhysicalDeviceMeshShaderPropertiesNV& setMaxMeshWorkGroupSize( std::array maxMeshWorkGroupSize_ ) + { + memcpy( &maxMeshWorkGroupSize, maxMeshWorkGroupSize_.data(), 3 * sizeof( uint32_t ) ); + return *this; + } + + PhysicalDeviceMeshShaderPropertiesNV& setMaxMeshTotalMemorySize( uint32_t maxMeshTotalMemorySize_ ) + { + maxMeshTotalMemorySize = maxMeshTotalMemorySize_; + return *this; + } + + 
PhysicalDeviceMeshShaderPropertiesNV& setMaxMeshOutputVertices( uint32_t maxMeshOutputVertices_ ) + { + maxMeshOutputVertices = maxMeshOutputVertices_; + return *this; + } + + PhysicalDeviceMeshShaderPropertiesNV& setMaxMeshOutputPrimitives( uint32_t maxMeshOutputPrimitives_ ) + { + maxMeshOutputPrimitives = maxMeshOutputPrimitives_; + return *this; + } + + PhysicalDeviceMeshShaderPropertiesNV& setMaxMeshMultiviewViewCount( uint32_t maxMeshMultiviewViewCount_ ) + { + maxMeshMultiviewViewCount = maxMeshMultiviewViewCount_; + return *this; + } + + PhysicalDeviceMeshShaderPropertiesNV& setMeshOutputPerVertexGranularity( uint32_t meshOutputPerVertexGranularity_ ) + { + meshOutputPerVertexGranularity = meshOutputPerVertexGranularity_; + return *this; + } + + PhysicalDeviceMeshShaderPropertiesNV& setMeshOutputPerPrimitiveGranularity( uint32_t meshOutputPerPrimitiveGranularity_ ) + { + meshOutputPerPrimitiveGranularity = meshOutputPerPrimitiveGranularity_; + return *this; + } + + operator VkPhysicalDeviceMeshShaderPropertiesNV const&() const + { + return *reinterpret_cast(this); + } + + operator VkPhysicalDeviceMeshShaderPropertiesNV &() + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDeviceMeshShaderPropertiesNV const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( maxDrawMeshTasksCount == rhs.maxDrawMeshTasksCount ) + && ( maxTaskWorkGroupInvocations == rhs.maxTaskWorkGroupInvocations ) + && ( memcmp( maxTaskWorkGroupSize, rhs.maxTaskWorkGroupSize, 3 * sizeof( uint32_t ) ) == 0 ) + && ( maxTaskTotalMemorySize == rhs.maxTaskTotalMemorySize ) + && ( maxTaskOutputCount == rhs.maxTaskOutputCount ) + && ( maxMeshWorkGroupInvocations == rhs.maxMeshWorkGroupInvocations ) + && ( memcmp( maxMeshWorkGroupSize, rhs.maxMeshWorkGroupSize, 3 * sizeof( uint32_t ) ) == 0 ) + && ( maxMeshTotalMemorySize == rhs.maxMeshTotalMemorySize ) + && ( maxMeshOutputVertices == rhs.maxMeshOutputVertices ) + && ( maxMeshOutputPrimitives == rhs.maxMeshOutputPrimitives ) + && ( maxMeshMultiviewViewCount == rhs.maxMeshMultiviewViewCount ) + && ( meshOutputPerVertexGranularity == rhs.meshOutputPerVertexGranularity ) + && ( meshOutputPerPrimitiveGranularity == rhs.meshOutputPerPrimitiveGranularity ); + } + + bool operator!=( PhysicalDeviceMeshShaderPropertiesNV const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePhysicalDeviceMeshShaderPropertiesNV; + + public: + void* pNext = nullptr; + uint32_t maxDrawMeshTasksCount; + uint32_t maxTaskWorkGroupInvocations; + uint32_t maxTaskWorkGroupSize[3]; + uint32_t maxTaskTotalMemorySize; + uint32_t maxTaskOutputCount; + uint32_t maxMeshWorkGroupInvocations; + uint32_t maxMeshWorkGroupSize[3]; + uint32_t maxMeshTotalMemorySize; + uint32_t maxMeshOutputVertices; + uint32_t maxMeshOutputPrimitives; + uint32_t maxMeshMultiviewViewCount; + uint32_t meshOutputPerVertexGranularity; + uint32_t meshOutputPerPrimitiveGranularity; + }; + static_assert( sizeof( PhysicalDeviceMeshShaderPropertiesNV ) == sizeof( VkPhysicalDeviceMeshShaderPropertiesNV ), "struct and wrapper have different size!" 
); + + struct GeometryTrianglesNV + { + GeometryTrianglesNV( Buffer vertexData_ = Buffer(), + DeviceSize vertexOffset_ = 0, + uint32_t vertexCount_ = 0, + DeviceSize vertexStride_ = 0, + Format vertexFormat_ = Format::eUndefined, + Buffer indexData_ = Buffer(), + DeviceSize indexOffset_ = 0, + uint32_t indexCount_ = 0, + IndexType indexType_ = IndexType::eUint16, + Buffer transformData_ = Buffer(), + DeviceSize transformOffset_ = 0 ) + : vertexData( vertexData_ ) + , vertexOffset( vertexOffset_ ) + , vertexCount( vertexCount_ ) + , vertexStride( vertexStride_ ) + , vertexFormat( vertexFormat_ ) + , indexData( indexData_ ) + , indexOffset( indexOffset_ ) + , indexCount( indexCount_ ) + , indexType( indexType_ ) + , transformData( transformData_ ) + , transformOffset( transformOffset_ ) + { + } + + GeometryTrianglesNV( VkGeometryTrianglesNV const & rhs ) + { + memcpy( this, &rhs, sizeof( GeometryTrianglesNV ) ); + } + + GeometryTrianglesNV& operator=( VkGeometryTrianglesNV const & rhs ) + { + memcpy( this, &rhs, sizeof( GeometryTrianglesNV ) ); + return *this; + } + GeometryTrianglesNV& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + GeometryTrianglesNV& setVertexData( Buffer vertexData_ ) + { + vertexData = vertexData_; + return *this; + } + + GeometryTrianglesNV& setVertexOffset( DeviceSize vertexOffset_ ) + { + vertexOffset = vertexOffset_; + return *this; + } + + GeometryTrianglesNV& setVertexCount( uint32_t vertexCount_ ) + { + vertexCount = vertexCount_; + return *this; + } + + GeometryTrianglesNV& setVertexStride( DeviceSize vertexStride_ ) + { + vertexStride = vertexStride_; + return *this; + } + + GeometryTrianglesNV& setVertexFormat( Format vertexFormat_ ) + { + vertexFormat = vertexFormat_; + return *this; + } + + GeometryTrianglesNV& setIndexData( Buffer indexData_ ) + { + indexData = indexData_; + return *this; + } + + GeometryTrianglesNV& setIndexOffset( DeviceSize indexOffset_ ) + { + indexOffset = indexOffset_; + return *this; + } + + GeometryTrianglesNV& setIndexCount( uint32_t indexCount_ ) + { + indexCount = indexCount_; + return *this; + } + + GeometryTrianglesNV& setIndexType( IndexType indexType_ ) + { + indexType = indexType_; + return *this; + } + + GeometryTrianglesNV& setTransformData( Buffer transformData_ ) + { + transformData = transformData_; + return *this; + } + + GeometryTrianglesNV& setTransformOffset( DeviceSize transformOffset_ ) + { + transformOffset = transformOffset_; + return *this; + } + + operator VkGeometryTrianglesNV const&() const + { + return *reinterpret_cast(this); + } + + operator VkGeometryTrianglesNV &() + { + return *reinterpret_cast(this); + } + + bool operator==( GeometryTrianglesNV const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( vertexData == rhs.vertexData ) + && ( vertexOffset == rhs.vertexOffset ) + && ( vertexCount == rhs.vertexCount ) + && ( vertexStride == rhs.vertexStride ) + && ( vertexFormat == rhs.vertexFormat ) + && ( indexData == rhs.indexData ) + && ( indexOffset == rhs.indexOffset ) + && ( indexCount == rhs.indexCount ) + && ( indexType == rhs.indexType ) + && ( transformData == rhs.transformData ) + && ( transformOffset == rhs.transformOffset ); + } + + bool operator!=( GeometryTrianglesNV const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eGeometryTrianglesNV; + + public: + const void* pNext = nullptr; + Buffer vertexData; + DeviceSize vertexOffset; + uint32_t vertexCount; + DeviceSize 
vertexStride; + Format vertexFormat; + Buffer indexData; + DeviceSize indexOffset; + uint32_t indexCount; + IndexType indexType; + Buffer transformData; + DeviceSize transformOffset; + }; + static_assert( sizeof( GeometryTrianglesNV ) == sizeof( VkGeometryTrianglesNV ), "struct and wrapper have different size!" ); + + struct GeometryAABBNV + { + GeometryAABBNV( Buffer aabbData_ = Buffer(), + uint32_t numAABBs_ = 0, + uint32_t stride_ = 0, + DeviceSize offset_ = 0 ) + : aabbData( aabbData_ ) + , numAABBs( numAABBs_ ) + , stride( stride_ ) + , offset( offset_ ) + { + } + + GeometryAABBNV( VkGeometryAABBNV const & rhs ) + { + memcpy( this, &rhs, sizeof( GeometryAABBNV ) ); + } + + GeometryAABBNV& operator=( VkGeometryAABBNV const & rhs ) + { + memcpy( this, &rhs, sizeof( GeometryAABBNV ) ); + return *this; + } + GeometryAABBNV& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + GeometryAABBNV& setAabbData( Buffer aabbData_ ) + { + aabbData = aabbData_; + return *this; + } + + GeometryAABBNV& setNumAABBs( uint32_t numAABBs_ ) + { + numAABBs = numAABBs_; + return *this; + } + + GeometryAABBNV& setStride( uint32_t stride_ ) + { + stride = stride_; + return *this; + } + + GeometryAABBNV& setOffset( DeviceSize offset_ ) + { + offset = offset_; + return *this; + } + + operator VkGeometryAABBNV const&() const + { + return *reinterpret_cast(this); + } + + operator VkGeometryAABBNV &() + { + return *reinterpret_cast(this); + } + + bool operator==( GeometryAABBNV const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( aabbData == rhs.aabbData ) + && ( numAABBs == rhs.numAABBs ) + && ( stride == rhs.stride ) + && ( offset == rhs.offset ); + } + + bool operator!=( GeometryAABBNV const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eGeometryAabbNV; + + public: + const void* pNext = nullptr; + Buffer aabbData; + uint32_t numAABBs; + uint32_t stride; + DeviceSize offset; + }; + static_assert( sizeof( GeometryAABBNV ) == sizeof( VkGeometryAABBNV ), "struct and wrapper have different size!" ); + + struct GeometryDataNV + { + GeometryDataNV( GeometryTrianglesNV triangles_ = GeometryTrianglesNV(), + GeometryAABBNV aabbs_ = GeometryAABBNV() ) + : triangles( triangles_ ) + , aabbs( aabbs_ ) + { + } + + GeometryDataNV( VkGeometryDataNV const & rhs ) + { + memcpy( this, &rhs, sizeof( GeometryDataNV ) ); + } + + GeometryDataNV& operator=( VkGeometryDataNV const & rhs ) + { + memcpy( this, &rhs, sizeof( GeometryDataNV ) ); + return *this; + } + GeometryDataNV& setTriangles( GeometryTrianglesNV triangles_ ) + { + triangles = triangles_; + return *this; + } + + GeometryDataNV& setAabbs( GeometryAABBNV aabbs_ ) + { + aabbs = aabbs_; + return *this; + } + + operator VkGeometryDataNV const&() const + { + return *reinterpret_cast(this); + } + + operator VkGeometryDataNV &() + { + return *reinterpret_cast(this); + } + + bool operator==( GeometryDataNV const& rhs ) const + { + return ( triangles == rhs.triangles ) + && ( aabbs == rhs.aabbs ); + } + + bool operator!=( GeometryDataNV const& rhs ) const + { + return !operator==( rhs ); + } + + GeometryTrianglesNV triangles; + GeometryAABBNV aabbs; + }; + static_assert( sizeof( GeometryDataNV ) == sizeof( VkGeometryDataNV ), "struct and wrapper have different size!" 
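// --- Illustrative usage sketch (editor's note, not part of the patch) ---------
// Describing triangle geometry for VK_NV_ray_tracing with the setters above.
// Buffers, counts and the vertex layout are caller-supplied placeholders.
inline vk::GeometryDataNV makeTriangleGeometryData( vk::Buffer vertexBuffer, uint32_t vertexCount, vk::DeviceSize vertexStride,
                                                    vk::Buffer indexBuffer, uint32_t indexCount )
{
  vk::GeometryTrianglesNV triangles;
  triangles.setVertexData( vertexBuffer )
           .setVertexCount( vertexCount )
           .setVertexStride( vertexStride )
           .setVertexFormat( vk::Format::eR32G32B32Sfloat )
           .setIndexData( indexBuffer )
           .setIndexCount( indexCount )
           .setIndexType( vk::IndexType::eUint32 );
  return vk::GeometryDataNV().setTriangles( triangles );  // aabbs stays default-constructed for triangle geometry
}
// ------------------------------------------------------------------------------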
); + + struct BindAccelerationStructureMemoryInfoNV + { + BindAccelerationStructureMemoryInfoNV( AccelerationStructureNV accelerationStructure_ = AccelerationStructureNV(), + DeviceMemory memory_ = DeviceMemory(), + DeviceSize memoryOffset_ = 0, + uint32_t deviceIndexCount_ = 0, + const uint32_t* pDeviceIndices_ = nullptr ) + : accelerationStructure( accelerationStructure_ ) + , memory( memory_ ) + , memoryOffset( memoryOffset_ ) + , deviceIndexCount( deviceIndexCount_ ) + , pDeviceIndices( pDeviceIndices_ ) + { + } + + BindAccelerationStructureMemoryInfoNV( VkBindAccelerationStructureMemoryInfoNV const & rhs ) + { + memcpy( this, &rhs, sizeof( BindAccelerationStructureMemoryInfoNV ) ); + } + + BindAccelerationStructureMemoryInfoNV& operator=( VkBindAccelerationStructureMemoryInfoNV const & rhs ) + { + memcpy( this, &rhs, sizeof( BindAccelerationStructureMemoryInfoNV ) ); + return *this; + } + BindAccelerationStructureMemoryInfoNV& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + BindAccelerationStructureMemoryInfoNV& setAccelerationStructure( AccelerationStructureNV accelerationStructure_ ) + { + accelerationStructure = accelerationStructure_; + return *this; + } + + BindAccelerationStructureMemoryInfoNV& setMemory( DeviceMemory memory_ ) + { + memory = memory_; + return *this; + } + + BindAccelerationStructureMemoryInfoNV& setMemoryOffset( DeviceSize memoryOffset_ ) + { + memoryOffset = memoryOffset_; + return *this; + } + + BindAccelerationStructureMemoryInfoNV& setDeviceIndexCount( uint32_t deviceIndexCount_ ) + { + deviceIndexCount = deviceIndexCount_; + return *this; + } + + BindAccelerationStructureMemoryInfoNV& setPDeviceIndices( const uint32_t* pDeviceIndices_ ) + { + pDeviceIndices = pDeviceIndices_; + return *this; + } + + operator VkBindAccelerationStructureMemoryInfoNV const&() const + { + return *reinterpret_cast(this); + } + + operator VkBindAccelerationStructureMemoryInfoNV &() + { + return *reinterpret_cast(this); + } + + bool operator==( BindAccelerationStructureMemoryInfoNV const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( accelerationStructure == rhs.accelerationStructure ) + && ( memory == rhs.memory ) + && ( memoryOffset == rhs.memoryOffset ) + && ( deviceIndexCount == rhs.deviceIndexCount ) + && ( pDeviceIndices == rhs.pDeviceIndices ); + } + + bool operator!=( BindAccelerationStructureMemoryInfoNV const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eBindAccelerationStructureMemoryInfoNV; + + public: + const void* pNext = nullptr; + AccelerationStructureNV accelerationStructure; + DeviceMemory memory; + DeviceSize memoryOffset; + uint32_t deviceIndexCount; + const uint32_t* pDeviceIndices; + }; + static_assert( sizeof( BindAccelerationStructureMemoryInfoNV ) == sizeof( VkBindAccelerationStructureMemoryInfoNV ), "struct and wrapper have different size!" 
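// --- Illustrative usage sketch (editor's note, not part of the patch) ---------
// Filling the bind-info that attaches device memory to an NV acceleration
// structure; the filled struct would then be handed to the
// vkBindAccelerationStructureMemoryNV command, whose wrapper is generated further
// down in this header. Handles and the offset are caller-supplied placeholders.
inline vk::BindAccelerationStructureMemoryInfoNV makeAccelerationStructureBindInfo( vk::AccelerationStructureNV accelerationStructure,
                                                                                    vk::DeviceMemory memory,
                                                                                    vk::DeviceSize memoryOffset )
{
  return vk::BindAccelerationStructureMemoryInfoNV()
    .setAccelerationStructure( accelerationStructure )
    .setMemory( memory )
    .setMemoryOffset( memoryOffset );   // deviceIndexCount/pDeviceIndices stay 0/null for a single-GPU bind
}
// ------------------------------------------------------------------------------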
); + + struct WriteDescriptorSetAccelerationStructureNV + { + WriteDescriptorSetAccelerationStructureNV( uint32_t accelerationStructureCount_ = 0, + const AccelerationStructureNV* pAccelerationStructures_ = nullptr ) + : accelerationStructureCount( accelerationStructureCount_ ) + , pAccelerationStructures( pAccelerationStructures_ ) + { + } + + WriteDescriptorSetAccelerationStructureNV( VkWriteDescriptorSetAccelerationStructureNV const & rhs ) + { + memcpy( this, &rhs, sizeof( WriteDescriptorSetAccelerationStructureNV ) ); + } + + WriteDescriptorSetAccelerationStructureNV& operator=( VkWriteDescriptorSetAccelerationStructureNV const & rhs ) + { + memcpy( this, &rhs, sizeof( WriteDescriptorSetAccelerationStructureNV ) ); + return *this; + } + WriteDescriptorSetAccelerationStructureNV& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + WriteDescriptorSetAccelerationStructureNV& setAccelerationStructureCount( uint32_t accelerationStructureCount_ ) + { + accelerationStructureCount = accelerationStructureCount_; + return *this; + } + + WriteDescriptorSetAccelerationStructureNV& setPAccelerationStructures( const AccelerationStructureNV* pAccelerationStructures_ ) + { + pAccelerationStructures = pAccelerationStructures_; + return *this; + } + + operator VkWriteDescriptorSetAccelerationStructureNV const&() const + { + return *reinterpret_cast<const VkWriteDescriptorSetAccelerationStructureNV*>(this); + } + + operator VkWriteDescriptorSetAccelerationStructureNV &() + { + return *reinterpret_cast<VkWriteDescriptorSetAccelerationStructureNV*>(this); + } + + bool operator==( WriteDescriptorSetAccelerationStructureNV const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( accelerationStructureCount == rhs.accelerationStructureCount ) + && ( pAccelerationStructures == rhs.pAccelerationStructures ); + } + + bool operator!=( WriteDescriptorSetAccelerationStructureNV const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eWriteDescriptorSetAccelerationStructureNV; + + public: + const void* pNext = nullptr; + uint32_t accelerationStructureCount; + const AccelerationStructureNV* pAccelerationStructures; + }; + static_assert( sizeof( WriteDescriptorSetAccelerationStructureNV ) == sizeof( VkWriteDescriptorSetAccelerationStructureNV ), "struct and wrapper have different size!"
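// --- Illustrative usage sketch (editor's note, not part of the patch) ---------
// Chaining the struct above into a WriteDescriptorSet (generated elsewhere in
// this header) so vkUpdateDescriptorSets can bind a top-level acceleration
// structure. The descriptor set, binding index and acceleration-structure info
// are caller-supplied placeholders.
inline vk::WriteDescriptorSet makeAccelerationStructureWrite( vk::DescriptorSet descriptorSet, uint32_t binding,
                                                              vk::WriteDescriptorSetAccelerationStructureNV const & asInfo )
{
  return vk::WriteDescriptorSet()
    .setPNext( &asInfo )                                   // the acceleration structure list rides on pNext
    .setDstSet( descriptorSet )
    .setDstBinding( binding )
    .setDescriptorCount( asInfo.accelerationStructureCount )
    .setDescriptorType( vk::DescriptorType::eAccelerationStructureNV );
}
// ------------------------------------------------------------------------------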
); + + struct PhysicalDeviceRayTracingPropertiesNV + { + PhysicalDeviceRayTracingPropertiesNV( uint32_t shaderGroupHandleSize_ = 0, + uint32_t maxRecursionDepth_ = 0, + uint32_t maxShaderGroupStride_ = 0, + uint32_t shaderGroupBaseAlignment_ = 0, + uint64_t maxGeometryCount_ = 0, + uint64_t maxInstanceCount_ = 0, + uint64_t maxTriangleCount_ = 0, + uint32_t maxDescriptorSetAccelerationStructures_ = 0 ) + : shaderGroupHandleSize( shaderGroupHandleSize_ ) + , maxRecursionDepth( maxRecursionDepth_ ) + , maxShaderGroupStride( maxShaderGroupStride_ ) + , shaderGroupBaseAlignment( shaderGroupBaseAlignment_ ) + , maxGeometryCount( maxGeometryCount_ ) + , maxInstanceCount( maxInstanceCount_ ) + , maxTriangleCount( maxTriangleCount_ ) + , maxDescriptorSetAccelerationStructures( maxDescriptorSetAccelerationStructures_ ) + { + } + + PhysicalDeviceRayTracingPropertiesNV( VkPhysicalDeviceRayTracingPropertiesNV const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceRayTracingPropertiesNV ) ); + } + + PhysicalDeviceRayTracingPropertiesNV& operator=( VkPhysicalDeviceRayTracingPropertiesNV const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceRayTracingPropertiesNV ) ); + return *this; + } + PhysicalDeviceRayTracingPropertiesNV& setPNext( void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceRayTracingPropertiesNV& setShaderGroupHandleSize( uint32_t shaderGroupHandleSize_ ) + { + shaderGroupHandleSize = shaderGroupHandleSize_; + return *this; + } + + PhysicalDeviceRayTracingPropertiesNV& setMaxRecursionDepth( uint32_t maxRecursionDepth_ ) + { + maxRecursionDepth = maxRecursionDepth_; + return *this; + } + + PhysicalDeviceRayTracingPropertiesNV& setMaxShaderGroupStride( uint32_t maxShaderGroupStride_ ) + { + maxShaderGroupStride = maxShaderGroupStride_; + return *this; + } + + PhysicalDeviceRayTracingPropertiesNV& setShaderGroupBaseAlignment( uint32_t shaderGroupBaseAlignment_ ) + { + shaderGroupBaseAlignment = shaderGroupBaseAlignment_; + return *this; + } + + PhysicalDeviceRayTracingPropertiesNV& setMaxGeometryCount( uint64_t maxGeometryCount_ ) + { + maxGeometryCount = maxGeometryCount_; + return *this; + } + + PhysicalDeviceRayTracingPropertiesNV& setMaxInstanceCount( uint64_t maxInstanceCount_ ) + { + maxInstanceCount = maxInstanceCount_; + return *this; + } + + PhysicalDeviceRayTracingPropertiesNV& setMaxTriangleCount( uint64_t maxTriangleCount_ ) + { + maxTriangleCount = maxTriangleCount_; + return *this; + } + + PhysicalDeviceRayTracingPropertiesNV& setMaxDescriptorSetAccelerationStructures( uint32_t maxDescriptorSetAccelerationStructures_ ) + { + maxDescriptorSetAccelerationStructures = maxDescriptorSetAccelerationStructures_; + return *this; + } + + operator VkPhysicalDeviceRayTracingPropertiesNV const&() const + { + return *reinterpret_cast(this); + } + + operator VkPhysicalDeviceRayTracingPropertiesNV &() + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDeviceRayTracingPropertiesNV const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( shaderGroupHandleSize == rhs.shaderGroupHandleSize ) + && ( maxRecursionDepth == rhs.maxRecursionDepth ) + && ( maxShaderGroupStride == rhs.maxShaderGroupStride ) + && ( shaderGroupBaseAlignment == rhs.shaderGroupBaseAlignment ) + && ( maxGeometryCount == rhs.maxGeometryCount ) + && ( maxInstanceCount == rhs.maxInstanceCount ) + && ( maxTriangleCount == rhs.maxTriangleCount ) + && ( maxDescriptorSetAccelerationStructures == rhs.maxDescriptorSetAccelerationStructures ); + } 
+ + bool operator!=( PhysicalDeviceRayTracingPropertiesNV const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePhysicalDeviceRayTracingPropertiesNV; + + public: + void* pNext = nullptr; + uint32_t shaderGroupHandleSize; + uint32_t maxRecursionDepth; + uint32_t maxShaderGroupStride; + uint32_t shaderGroupBaseAlignment; + uint64_t maxGeometryCount; + uint64_t maxInstanceCount; + uint64_t maxTriangleCount; + uint32_t maxDescriptorSetAccelerationStructures; + }; + static_assert( sizeof( PhysicalDeviceRayTracingPropertiesNV ) == sizeof( VkPhysicalDeviceRayTracingPropertiesNV ), "struct and wrapper have different size!" ); + + struct PhysicalDeviceImageDrmFormatModifierInfoEXT + { + PhysicalDeviceImageDrmFormatModifierInfoEXT( uint64_t drmFormatModifier_ = 0, + SharingMode sharingMode_ = SharingMode::eExclusive, + uint32_t queueFamilyIndexCount_ = 0, + const uint32_t* pQueueFamilyIndices_ = nullptr ) + : drmFormatModifier( drmFormatModifier_ ) + , sharingMode( sharingMode_ ) + , queueFamilyIndexCount( queueFamilyIndexCount_ ) + , pQueueFamilyIndices( pQueueFamilyIndices_ ) + { + } + + PhysicalDeviceImageDrmFormatModifierInfoEXT( VkPhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceImageDrmFormatModifierInfoEXT ) ); + } + + PhysicalDeviceImageDrmFormatModifierInfoEXT& operator=( VkPhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceImageDrmFormatModifierInfoEXT ) ); + return *this; + } + PhysicalDeviceImageDrmFormatModifierInfoEXT& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceImageDrmFormatModifierInfoEXT& setDrmFormatModifier( uint64_t drmFormatModifier_ ) + { + drmFormatModifier = drmFormatModifier_; + return *this; + } + + PhysicalDeviceImageDrmFormatModifierInfoEXT& setSharingMode( SharingMode sharingMode_ ) + { + sharingMode = sharingMode_; + return *this; + } + + PhysicalDeviceImageDrmFormatModifierInfoEXT& setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) + { + queueFamilyIndexCount = queueFamilyIndexCount_; + return *this; + } + + PhysicalDeviceImageDrmFormatModifierInfoEXT& setPQueueFamilyIndices( const uint32_t* pQueueFamilyIndices_ ) + { + pQueueFamilyIndices = pQueueFamilyIndices_; + return *this; + } + + operator VkPhysicalDeviceImageDrmFormatModifierInfoEXT const&() const + { + return *reinterpret_cast(this); + } + + operator VkPhysicalDeviceImageDrmFormatModifierInfoEXT &() + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDeviceImageDrmFormatModifierInfoEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( drmFormatModifier == rhs.drmFormatModifier ) + && ( sharingMode == rhs.sharingMode ) + && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount ) + && ( pQueueFamilyIndices == rhs.pQueueFamilyIndices ); + } + + bool operator!=( PhysicalDeviceImageDrmFormatModifierInfoEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePhysicalDeviceImageDrmFormatModifierInfoEXT; + + public: + const void* pNext = nullptr; + uint64_t drmFormatModifier; + SharingMode sharingMode; + uint32_t queueFamilyIndexCount; + const uint32_t* pQueueFamilyIndices; + }; + static_assert( sizeof( PhysicalDeviceImageDrmFormatModifierInfoEXT ) == sizeof( VkPhysicalDeviceImageDrmFormatModifierInfoEXT ), "struct and wrapper have different size!" 
); + + struct ImageDrmFormatModifierListCreateInfoEXT + { + ImageDrmFormatModifierListCreateInfoEXT( uint32_t drmFormatModifierCount_ = 0, + const uint64_t* pDrmFormatModifiers_ = nullptr ) + : drmFormatModifierCount( drmFormatModifierCount_ ) + , pDrmFormatModifiers( pDrmFormatModifiers_ ) + { + } + + ImageDrmFormatModifierListCreateInfoEXT( VkImageDrmFormatModifierListCreateInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( ImageDrmFormatModifierListCreateInfoEXT ) ); + } + + ImageDrmFormatModifierListCreateInfoEXT& operator=( VkImageDrmFormatModifierListCreateInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( ImageDrmFormatModifierListCreateInfoEXT ) ); + return *this; + } + ImageDrmFormatModifierListCreateInfoEXT& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + ImageDrmFormatModifierListCreateInfoEXT& setDrmFormatModifierCount( uint32_t drmFormatModifierCount_ ) + { + drmFormatModifierCount = drmFormatModifierCount_; + return *this; + } + + ImageDrmFormatModifierListCreateInfoEXT& setPDrmFormatModifiers( const uint64_t* pDrmFormatModifiers_ ) + { + pDrmFormatModifiers = pDrmFormatModifiers_; + return *this; + } + + operator VkImageDrmFormatModifierListCreateInfoEXT const&() const + { + return *reinterpret_cast(this); + } + + operator VkImageDrmFormatModifierListCreateInfoEXT &() + { + return *reinterpret_cast(this); + } + + bool operator==( ImageDrmFormatModifierListCreateInfoEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( drmFormatModifierCount == rhs.drmFormatModifierCount ) + && ( pDrmFormatModifiers == rhs.pDrmFormatModifiers ); + } + + bool operator!=( ImageDrmFormatModifierListCreateInfoEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eImageDrmFormatModifierListCreateInfoEXT; + + public: + const void* pNext = nullptr; + uint32_t drmFormatModifierCount; + const uint64_t* pDrmFormatModifiers; + }; + static_assert( sizeof( ImageDrmFormatModifierListCreateInfoEXT ) == sizeof( VkImageDrmFormatModifierListCreateInfoEXT ), "struct and wrapper have different size!" 
); + + struct ImageDrmFormatModifierExplicitCreateInfoEXT + { + ImageDrmFormatModifierExplicitCreateInfoEXT( uint64_t drmFormatModifier_ = 0, + uint32_t drmFormatModifierPlaneCount_ = 0, + const SubresourceLayout* pPlaneLayouts_ = nullptr ) + : drmFormatModifier( drmFormatModifier_ ) + , drmFormatModifierPlaneCount( drmFormatModifierPlaneCount_ ) + , pPlaneLayouts( pPlaneLayouts_ ) + { + } + + ImageDrmFormatModifierExplicitCreateInfoEXT( VkImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( ImageDrmFormatModifierExplicitCreateInfoEXT ) ); + } + + ImageDrmFormatModifierExplicitCreateInfoEXT& operator=( VkImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( ImageDrmFormatModifierExplicitCreateInfoEXT ) ); + return *this; + } + ImageDrmFormatModifierExplicitCreateInfoEXT& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + ImageDrmFormatModifierExplicitCreateInfoEXT& setDrmFormatModifier( uint64_t drmFormatModifier_ ) + { + drmFormatModifier = drmFormatModifier_; + return *this; + } + + ImageDrmFormatModifierExplicitCreateInfoEXT& setDrmFormatModifierPlaneCount( uint32_t drmFormatModifierPlaneCount_ ) + { + drmFormatModifierPlaneCount = drmFormatModifierPlaneCount_; + return *this; + } + + ImageDrmFormatModifierExplicitCreateInfoEXT& setPPlaneLayouts( const SubresourceLayout* pPlaneLayouts_ ) + { + pPlaneLayouts = pPlaneLayouts_; + return *this; + } + + operator VkImageDrmFormatModifierExplicitCreateInfoEXT const&() const + { + return *reinterpret_cast(this); + } + + operator VkImageDrmFormatModifierExplicitCreateInfoEXT &() + { + return *reinterpret_cast(this); + } + + bool operator==( ImageDrmFormatModifierExplicitCreateInfoEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( drmFormatModifier == rhs.drmFormatModifier ) + && ( drmFormatModifierPlaneCount == rhs.drmFormatModifierPlaneCount ) + && ( pPlaneLayouts == rhs.pPlaneLayouts ); + } + + bool operator!=( ImageDrmFormatModifierExplicitCreateInfoEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eImageDrmFormatModifierExplicitCreateInfoEXT; + + public: + const void* pNext = nullptr; + uint64_t drmFormatModifier; + uint32_t drmFormatModifierPlaneCount; + const SubresourceLayout* pPlaneLayouts; + }; + static_assert( sizeof( ImageDrmFormatModifierExplicitCreateInfoEXT ) == sizeof( VkImageDrmFormatModifierExplicitCreateInfoEXT ), "struct and wrapper have different size!" ); + + struct ImageDrmFormatModifierPropertiesEXT + { + operator VkImageDrmFormatModifierPropertiesEXT const&() const + { + return *reinterpret_cast(this); + } + + operator VkImageDrmFormatModifierPropertiesEXT &() + { + return *reinterpret_cast(this); + } + + bool operator==( ImageDrmFormatModifierPropertiesEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( drmFormatModifier == rhs.drmFormatModifier ); + } + + bool operator!=( ImageDrmFormatModifierPropertiesEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eImageDrmFormatModifierPropertiesEXT; + + public: + void* pNext = nullptr; + uint64_t drmFormatModifier; + }; + static_assert( sizeof( ImageDrmFormatModifierPropertiesEXT ) == sizeof( VkImageDrmFormatModifierPropertiesEXT ), "struct and wrapper have different size!" 
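// --- Illustrative usage sketch (editor's note, not part of the patch) ---------
// Creating an image whose tiling is chosen from a list of DRM format modifiers.
// ImageCreateInfo and ImageTiling are generated elsewhere in this header; the
// modifier list is a caller-supplied placeholder (0 is DRM_FORMAT_MOD_LINEAR).
inline vk::ImageCreateInfo makeDrmModifierImageInfo( const uint64_t* modifiers, uint32_t modifierCount,
                                                     vk::ImageDrmFormatModifierListCreateInfoEXT & modifierList )
{
  modifierList.setDrmFormatModifierCount( modifierCount )
              .setPDrmFormatModifiers( modifiers );
  return vk::ImageCreateInfo()
    .setPNext( &modifierList )
    .setTiling( vk::ImageTiling::eDrmFormatModifierEXT );  // remaining image parameters omitted for brevity
}
// ------------------------------------------------------------------------------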
); + + enum class SubpassContents + { + eInline = VK_SUBPASS_CONTENTS_INLINE, + eSecondaryCommandBuffers = VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS + }; + + struct SubpassBeginInfoKHR + { + SubpassBeginInfoKHR( SubpassContents contents_ = SubpassContents::eInline ) + : contents( contents_ ) + { + } + + SubpassBeginInfoKHR( VkSubpassBeginInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( SubpassBeginInfoKHR ) ); + } + + SubpassBeginInfoKHR& operator=( VkSubpassBeginInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( SubpassBeginInfoKHR ) ); + return *this; + } + SubpassBeginInfoKHR& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + SubpassBeginInfoKHR& setContents( SubpassContents contents_ ) + { + contents = contents_; + return *this; + } + + operator VkSubpassBeginInfoKHR const&() const + { + return *reinterpret_cast(this); + } + + operator VkSubpassBeginInfoKHR &() + { + return *reinterpret_cast(this); + } + + bool operator==( SubpassBeginInfoKHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( contents == rhs.contents ); + } + + bool operator!=( SubpassBeginInfoKHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eSubpassBeginInfoKHR; + + public: + const void* pNext = nullptr; + SubpassContents contents; + }; + static_assert( sizeof( SubpassBeginInfoKHR ) == sizeof( VkSubpassBeginInfoKHR ), "struct and wrapper have different size!" ); + + struct PresentInfoKHR + { + PresentInfoKHR( uint32_t waitSemaphoreCount_ = 0, + const Semaphore* pWaitSemaphores_ = nullptr, + uint32_t swapchainCount_ = 0, + const SwapchainKHR* pSwapchains_ = nullptr, + const uint32_t* pImageIndices_ = nullptr, + Result* pResults_ = nullptr ) + : waitSemaphoreCount( waitSemaphoreCount_ ) + , pWaitSemaphores( pWaitSemaphores_ ) + , swapchainCount( swapchainCount_ ) + , pSwapchains( pSwapchains_ ) + , pImageIndices( pImageIndices_ ) + , pResults( pResults_ ) + { + } + + PresentInfoKHR( VkPresentInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( PresentInfoKHR ) ); + } + + PresentInfoKHR& operator=( VkPresentInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( PresentInfoKHR ) ); + return *this; + } + PresentInfoKHR& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PresentInfoKHR& setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ ) + { + waitSemaphoreCount = waitSemaphoreCount_; + return *this; + } + + PresentInfoKHR& setPWaitSemaphores( const Semaphore* pWaitSemaphores_ ) + { + pWaitSemaphores = pWaitSemaphores_; + return *this; + } + + PresentInfoKHR& setSwapchainCount( uint32_t swapchainCount_ ) + { + swapchainCount = swapchainCount_; + return *this; + } + + PresentInfoKHR& setPSwapchains( const SwapchainKHR* pSwapchains_ ) + { + pSwapchains = pSwapchains_; + return *this; + } + + PresentInfoKHR& setPImageIndices( const uint32_t* pImageIndices_ ) + { + pImageIndices = pImageIndices_; + return *this; + } + + PresentInfoKHR& setPResults( Result* pResults_ ) + { + pResults = pResults_; + return *this; + } + + operator VkPresentInfoKHR const&() const + { + return *reinterpret_cast(this); + } + + operator VkPresentInfoKHR &() + { + return *reinterpret_cast(this); + } + + bool operator==( PresentInfoKHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( waitSemaphoreCount == rhs.waitSemaphoreCount ) + && ( pWaitSemaphores == rhs.pWaitSemaphores ) + && ( swapchainCount == rhs.swapchainCount ) + && ( 
pSwapchains == rhs.pSwapchains ) + && ( pImageIndices == rhs.pImageIndices ) + && ( pResults == rhs.pResults ); + } + + bool operator!=( PresentInfoKHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePresentInfoKHR; + + public: + const void* pNext = nullptr; + uint32_t waitSemaphoreCount; + const Semaphore* pWaitSemaphores; + uint32_t swapchainCount; + const SwapchainKHR* pSwapchains; + const uint32_t* pImageIndices; + Result* pResults; + }; + static_assert( sizeof( PresentInfoKHR ) == sizeof( VkPresentInfoKHR ), "struct and wrapper have different size!" ); + + enum class DynamicState + { + eViewport = VK_DYNAMIC_STATE_VIEWPORT, + eScissor = VK_DYNAMIC_STATE_SCISSOR, + eLineWidth = VK_DYNAMIC_STATE_LINE_WIDTH, + eDepthBias = VK_DYNAMIC_STATE_DEPTH_BIAS, + eBlendConstants = VK_DYNAMIC_STATE_BLEND_CONSTANTS, + eDepthBounds = VK_DYNAMIC_STATE_DEPTH_BOUNDS, + eStencilCompareMask = VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK, + eStencilWriteMask = VK_DYNAMIC_STATE_STENCIL_WRITE_MASK, + eStencilReference = VK_DYNAMIC_STATE_STENCIL_REFERENCE, + eViewportWScalingNV = VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV, + eDiscardRectangleEXT = VK_DYNAMIC_STATE_DISCARD_RECTANGLE_EXT, + eSampleLocationsEXT = VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT, + eViewportShadingRatePaletteNV = VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV, + eViewportCoarseSampleOrderNV = VK_DYNAMIC_STATE_VIEWPORT_COARSE_SAMPLE_ORDER_NV, + eExclusiveScissorNV = VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV + }; + + struct PipelineDynamicStateCreateInfo + { + PipelineDynamicStateCreateInfo( PipelineDynamicStateCreateFlags flags_ = PipelineDynamicStateCreateFlags(), + uint32_t dynamicStateCount_ = 0, + const DynamicState* pDynamicStates_ = nullptr ) + : flags( flags_ ) + , dynamicStateCount( dynamicStateCount_ ) + , pDynamicStates( pDynamicStates_ ) + { + } + + PipelineDynamicStateCreateInfo( VkPipelineDynamicStateCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineDynamicStateCreateInfo ) ); + } + + PipelineDynamicStateCreateInfo& operator=( VkPipelineDynamicStateCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineDynamicStateCreateInfo ) ); + return *this; + } + PipelineDynamicStateCreateInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PipelineDynamicStateCreateInfo& setFlags( PipelineDynamicStateCreateFlags flags_ ) + { + flags = flags_; + return *this; + } + + PipelineDynamicStateCreateInfo& setDynamicStateCount( uint32_t dynamicStateCount_ ) + { + dynamicStateCount = dynamicStateCount_; + return *this; + } + + PipelineDynamicStateCreateInfo& setPDynamicStates( const DynamicState* pDynamicStates_ ) + { + pDynamicStates = pDynamicStates_; + return *this; + } + + operator VkPipelineDynamicStateCreateInfo const&() const + { + return *reinterpret_cast(this); + } + + operator VkPipelineDynamicStateCreateInfo &() + { + return *reinterpret_cast(this); + } + + bool operator==( PipelineDynamicStateCreateInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( dynamicStateCount == rhs.dynamicStateCount ) + && ( pDynamicStates == rhs.pDynamicStates ); + } + + bool operator!=( PipelineDynamicStateCreateInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePipelineDynamicStateCreateInfo; + + public: + const void* pNext = nullptr; + PipelineDynamicStateCreateFlags flags; + uint32_t dynamicStateCount; + const 
DynamicState* pDynamicStates; + }; + static_assert( sizeof( PipelineDynamicStateCreateInfo ) == sizeof( VkPipelineDynamicStateCreateInfo ), "struct and wrapper have different size!" ); + + enum class DescriptorUpdateTemplateType + { + eDescriptorSet = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET, + eDescriptorSetKHR = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET, + ePushDescriptorsKHR = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR + }; + + struct DescriptorUpdateTemplateCreateInfo + { + DescriptorUpdateTemplateCreateInfo( DescriptorUpdateTemplateCreateFlags flags_ = DescriptorUpdateTemplateCreateFlags(), + uint32_t descriptorUpdateEntryCount_ = 0, + const DescriptorUpdateTemplateEntry* pDescriptorUpdateEntries_ = nullptr, + DescriptorUpdateTemplateType templateType_ = DescriptorUpdateTemplateType::eDescriptorSet, + DescriptorSetLayout descriptorSetLayout_ = DescriptorSetLayout(), + PipelineBindPoint pipelineBindPoint_ = PipelineBindPoint::eGraphics, + PipelineLayout pipelineLayout_ = PipelineLayout(), + uint32_t set_ = 0 ) + : flags( flags_ ) + , descriptorUpdateEntryCount( descriptorUpdateEntryCount_ ) + , pDescriptorUpdateEntries( pDescriptorUpdateEntries_ ) + , templateType( templateType_ ) + , descriptorSetLayout( descriptorSetLayout_ ) + , pipelineBindPoint( pipelineBindPoint_ ) + , pipelineLayout( pipelineLayout_ ) + , set( set_ ) + { + } + + DescriptorUpdateTemplateCreateInfo( VkDescriptorUpdateTemplateCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( DescriptorUpdateTemplateCreateInfo ) ); + } + + DescriptorUpdateTemplateCreateInfo& operator=( VkDescriptorUpdateTemplateCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( DescriptorUpdateTemplateCreateInfo ) ); + return *this; + } + DescriptorUpdateTemplateCreateInfo& setPNext( void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + DescriptorUpdateTemplateCreateInfo& setFlags( DescriptorUpdateTemplateCreateFlags flags_ ) + { + flags = flags_; + return *this; + } + + DescriptorUpdateTemplateCreateInfo& setDescriptorUpdateEntryCount( uint32_t descriptorUpdateEntryCount_ ) + { + descriptorUpdateEntryCount = descriptorUpdateEntryCount_; + return *this; + } + + DescriptorUpdateTemplateCreateInfo& setPDescriptorUpdateEntries( const DescriptorUpdateTemplateEntry* pDescriptorUpdateEntries_ ) + { + pDescriptorUpdateEntries = pDescriptorUpdateEntries_; + return *this; + } + + DescriptorUpdateTemplateCreateInfo& setTemplateType( DescriptorUpdateTemplateType templateType_ ) + { + templateType = templateType_; + return *this; + } + + DescriptorUpdateTemplateCreateInfo& setDescriptorSetLayout( DescriptorSetLayout descriptorSetLayout_ ) + { + descriptorSetLayout = descriptorSetLayout_; + return *this; + } + + DescriptorUpdateTemplateCreateInfo& setPipelineBindPoint( PipelineBindPoint pipelineBindPoint_ ) + { + pipelineBindPoint = pipelineBindPoint_; + return *this; + } + + DescriptorUpdateTemplateCreateInfo& setPipelineLayout( PipelineLayout pipelineLayout_ ) + { + pipelineLayout = pipelineLayout_; + return *this; + } + + DescriptorUpdateTemplateCreateInfo& setSet( uint32_t set_ ) + { + set = set_; + return *this; + } + + operator VkDescriptorUpdateTemplateCreateInfo const&() const + { + return *reinterpret_cast(this); + } + + operator VkDescriptorUpdateTemplateCreateInfo &() + { + return *reinterpret_cast(this); + } + + bool operator==( DescriptorUpdateTemplateCreateInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( 
descriptorUpdateEntryCount == rhs.descriptorUpdateEntryCount ) + && ( pDescriptorUpdateEntries == rhs.pDescriptorUpdateEntries ) + && ( templateType == rhs.templateType ) + && ( descriptorSetLayout == rhs.descriptorSetLayout ) + && ( pipelineBindPoint == rhs.pipelineBindPoint ) + && ( pipelineLayout == rhs.pipelineLayout ) + && ( set == rhs.set ); + } + + bool operator!=( DescriptorUpdateTemplateCreateInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eDescriptorUpdateTemplateCreateInfo; + + public: + void* pNext = nullptr; + DescriptorUpdateTemplateCreateFlags flags; + uint32_t descriptorUpdateEntryCount; + const DescriptorUpdateTemplateEntry* pDescriptorUpdateEntries; + DescriptorUpdateTemplateType templateType; + DescriptorSetLayout descriptorSetLayout; + PipelineBindPoint pipelineBindPoint; + PipelineLayout pipelineLayout; + uint32_t set; + }; + static_assert( sizeof( DescriptorUpdateTemplateCreateInfo ) == sizeof( VkDescriptorUpdateTemplateCreateInfo ), "struct and wrapper have different size!" ); + + using DescriptorUpdateTemplateCreateInfoKHR = DescriptorUpdateTemplateCreateInfo; + + enum class ObjectType + { + eUnknown = VK_OBJECT_TYPE_UNKNOWN, + eInstance = VK_OBJECT_TYPE_INSTANCE, + ePhysicalDevice = VK_OBJECT_TYPE_PHYSICAL_DEVICE, + eDevice = VK_OBJECT_TYPE_DEVICE, + eQueue = VK_OBJECT_TYPE_QUEUE, + eSemaphore = VK_OBJECT_TYPE_SEMAPHORE, + eCommandBuffer = VK_OBJECT_TYPE_COMMAND_BUFFER, + eFence = VK_OBJECT_TYPE_FENCE, + eDeviceMemory = VK_OBJECT_TYPE_DEVICE_MEMORY, + eBuffer = VK_OBJECT_TYPE_BUFFER, + eImage = VK_OBJECT_TYPE_IMAGE, + eEvent = VK_OBJECT_TYPE_EVENT, + eQueryPool = VK_OBJECT_TYPE_QUERY_POOL, + eBufferView = VK_OBJECT_TYPE_BUFFER_VIEW, + eImageView = VK_OBJECT_TYPE_IMAGE_VIEW, + eShaderModule = VK_OBJECT_TYPE_SHADER_MODULE, + ePipelineCache = VK_OBJECT_TYPE_PIPELINE_CACHE, + ePipelineLayout = VK_OBJECT_TYPE_PIPELINE_LAYOUT, + eRenderPass = VK_OBJECT_TYPE_RENDER_PASS, + ePipeline = VK_OBJECT_TYPE_PIPELINE, + eDescriptorSetLayout = VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT, + eSampler = VK_OBJECT_TYPE_SAMPLER, + eDescriptorPool = VK_OBJECT_TYPE_DESCRIPTOR_POOL, + eDescriptorSet = VK_OBJECT_TYPE_DESCRIPTOR_SET, + eFramebuffer = VK_OBJECT_TYPE_FRAMEBUFFER, + eCommandPool = VK_OBJECT_TYPE_COMMAND_POOL, + eSamplerYcbcrConversion = VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION, + eSamplerYcbcrConversionKHR = VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION, + eDescriptorUpdateTemplate = VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE, + eDescriptorUpdateTemplateKHR = VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE, + eSurfaceKHR = VK_OBJECT_TYPE_SURFACE_KHR, + eSwapchainKHR = VK_OBJECT_TYPE_SWAPCHAIN_KHR, + eDisplayKHR = VK_OBJECT_TYPE_DISPLAY_KHR, + eDisplayModeKHR = VK_OBJECT_TYPE_DISPLAY_MODE_KHR, + eDebugReportCallbackEXT = VK_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT, + eObjectTableNVX = VK_OBJECT_TYPE_OBJECT_TABLE_NVX, + eIndirectCommandsLayoutNVX = VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX, + eDebugUtilsMessengerEXT = VK_OBJECT_TYPE_DEBUG_UTILS_MESSENGER_EXT, + eValidationCacheEXT = VK_OBJECT_TYPE_VALIDATION_CACHE_EXT, + eAccelerationStructureNV = VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV + }; + + struct DebugUtilsObjectNameInfoEXT + { + DebugUtilsObjectNameInfoEXT( ObjectType objectType_ = ObjectType::eUnknown, + uint64_t objectHandle_ = 0, + const char* pObjectName_ = nullptr ) + : objectType( objectType_ ) + , objectHandle( objectHandle_ ) + , pObjectName( pObjectName_ ) + { + } + + DebugUtilsObjectNameInfoEXT( 
VkDebugUtilsObjectNameInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( DebugUtilsObjectNameInfoEXT ) ); + } + + DebugUtilsObjectNameInfoEXT& operator=( VkDebugUtilsObjectNameInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( DebugUtilsObjectNameInfoEXT ) ); + return *this; + } + DebugUtilsObjectNameInfoEXT& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + DebugUtilsObjectNameInfoEXT& setObjectType( ObjectType objectType_ ) + { + objectType = objectType_; + return *this; + } + + DebugUtilsObjectNameInfoEXT& setObjectHandle( uint64_t objectHandle_ ) + { + objectHandle = objectHandle_; + return *this; + } + + DebugUtilsObjectNameInfoEXT& setPObjectName( const char* pObjectName_ ) + { + pObjectName = pObjectName_; + return *this; + } + + operator VkDebugUtilsObjectNameInfoEXT const&() const + { + return *reinterpret_cast(this); + } + + operator VkDebugUtilsObjectNameInfoEXT &() + { + return *reinterpret_cast(this); + } + + bool operator==( DebugUtilsObjectNameInfoEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( objectType == rhs.objectType ) + && ( objectHandle == rhs.objectHandle ) + && ( pObjectName == rhs.pObjectName ); + } + + bool operator!=( DebugUtilsObjectNameInfoEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eDebugUtilsObjectNameInfoEXT; + + public: + const void* pNext = nullptr; + ObjectType objectType; + uint64_t objectHandle; + const char* pObjectName; + }; + static_assert( sizeof( DebugUtilsObjectNameInfoEXT ) == sizeof( VkDebugUtilsObjectNameInfoEXT ), "struct and wrapper have different size!" ); + + struct DebugUtilsObjectTagInfoEXT + { + DebugUtilsObjectTagInfoEXT( ObjectType objectType_ = ObjectType::eUnknown, + uint64_t objectHandle_ = 0, + uint64_t tagName_ = 0, + size_t tagSize_ = 0, + const void* pTag_ = nullptr ) + : objectType( objectType_ ) + , objectHandle( objectHandle_ ) + , tagName( tagName_ ) + , tagSize( tagSize_ ) + , pTag( pTag_ ) + { + } + + DebugUtilsObjectTagInfoEXT( VkDebugUtilsObjectTagInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( DebugUtilsObjectTagInfoEXT ) ); + } + + DebugUtilsObjectTagInfoEXT& operator=( VkDebugUtilsObjectTagInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( DebugUtilsObjectTagInfoEXT ) ); + return *this; + } + DebugUtilsObjectTagInfoEXT& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + DebugUtilsObjectTagInfoEXT& setObjectType( ObjectType objectType_ ) + { + objectType = objectType_; + return *this; + } + + DebugUtilsObjectTagInfoEXT& setObjectHandle( uint64_t objectHandle_ ) + { + objectHandle = objectHandle_; + return *this; + } + + DebugUtilsObjectTagInfoEXT& setTagName( uint64_t tagName_ ) + { + tagName = tagName_; + return *this; + } + + DebugUtilsObjectTagInfoEXT& setTagSize( size_t tagSize_ ) + { + tagSize = tagSize_; + return *this; + } + + DebugUtilsObjectTagInfoEXT& setPTag( const void* pTag_ ) + { + pTag = pTag_; + return *this; + } + + operator VkDebugUtilsObjectTagInfoEXT const&() const + { + return *reinterpret_cast(this); + } + + operator VkDebugUtilsObjectTagInfoEXT &() + { + return *reinterpret_cast(this); + } + + bool operator==( DebugUtilsObjectTagInfoEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( objectType == rhs.objectType ) + && ( objectHandle == rhs.objectHandle ) + && ( tagName == rhs.tagName ) + && ( tagSize == rhs.tagSize ) + && ( pTag == rhs.pTag ); + } + + bool 
operator!=( DebugUtilsObjectTagInfoEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eDebugUtilsObjectTagInfoEXT; + + public: + const void* pNext = nullptr; + ObjectType objectType; + uint64_t objectHandle; + uint64_t tagName; + size_t tagSize; + const void* pTag; + }; + static_assert( sizeof( DebugUtilsObjectTagInfoEXT ) == sizeof( VkDebugUtilsObjectTagInfoEXT ), "struct and wrapper have different size!" ); + + struct DebugUtilsMessengerCallbackDataEXT + { + DebugUtilsMessengerCallbackDataEXT( DebugUtilsMessengerCallbackDataFlagsEXT flags_ = DebugUtilsMessengerCallbackDataFlagsEXT(), + const char* pMessageIdName_ = nullptr, + int32_t messageIdNumber_ = 0, + const char* pMessage_ = nullptr, + uint32_t queueLabelCount_ = 0, + DebugUtilsLabelEXT* pQueueLabels_ = nullptr, + uint32_t cmdBufLabelCount_ = 0, + DebugUtilsLabelEXT* pCmdBufLabels_ = nullptr, + uint32_t objectCount_ = 0, + DebugUtilsObjectNameInfoEXT* pObjects_ = nullptr ) + : flags( flags_ ) + , pMessageIdName( pMessageIdName_ ) + , messageIdNumber( messageIdNumber_ ) + , pMessage( pMessage_ ) + , queueLabelCount( queueLabelCount_ ) + , pQueueLabels( pQueueLabels_ ) + , cmdBufLabelCount( cmdBufLabelCount_ ) + , pCmdBufLabels( pCmdBufLabels_ ) + , objectCount( objectCount_ ) + , pObjects( pObjects_ ) + { + } + + DebugUtilsMessengerCallbackDataEXT( VkDebugUtilsMessengerCallbackDataEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( DebugUtilsMessengerCallbackDataEXT ) ); + } + + DebugUtilsMessengerCallbackDataEXT& operator=( VkDebugUtilsMessengerCallbackDataEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( DebugUtilsMessengerCallbackDataEXT ) ); + return *this; + } + DebugUtilsMessengerCallbackDataEXT& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + DebugUtilsMessengerCallbackDataEXT& setFlags( DebugUtilsMessengerCallbackDataFlagsEXT flags_ ) + { + flags = flags_; + return *this; + } + + DebugUtilsMessengerCallbackDataEXT& setPMessageIdName( const char* pMessageIdName_ ) + { + pMessageIdName = pMessageIdName_; + return *this; + } + + DebugUtilsMessengerCallbackDataEXT& setMessageIdNumber( int32_t messageIdNumber_ ) + { + messageIdNumber = messageIdNumber_; + return *this; + } + + DebugUtilsMessengerCallbackDataEXT& setPMessage( const char* pMessage_ ) + { + pMessage = pMessage_; + return *this; + } + + DebugUtilsMessengerCallbackDataEXT& setQueueLabelCount( uint32_t queueLabelCount_ ) + { + queueLabelCount = queueLabelCount_; + return *this; + } + + DebugUtilsMessengerCallbackDataEXT& setPQueueLabels( DebugUtilsLabelEXT* pQueueLabels_ ) + { + pQueueLabels = pQueueLabels_; + return *this; + } + + DebugUtilsMessengerCallbackDataEXT& setCmdBufLabelCount( uint32_t cmdBufLabelCount_ ) + { + cmdBufLabelCount = cmdBufLabelCount_; + return *this; + } + + DebugUtilsMessengerCallbackDataEXT& setPCmdBufLabels( DebugUtilsLabelEXT* pCmdBufLabels_ ) + { + pCmdBufLabels = pCmdBufLabels_; + return *this; + } + + DebugUtilsMessengerCallbackDataEXT& setObjectCount( uint32_t objectCount_ ) + { + objectCount = objectCount_; + return *this; + } + + DebugUtilsMessengerCallbackDataEXT& setPObjects( DebugUtilsObjectNameInfoEXT* pObjects_ ) + { + pObjects = pObjects_; + return *this; + } + + operator VkDebugUtilsMessengerCallbackDataEXT const&() const + { + return *reinterpret_cast(this); + } + + operator VkDebugUtilsMessengerCallbackDataEXT &() + { + return *reinterpret_cast(this); + } + + bool operator==( DebugUtilsMessengerCallbackDataEXT const& rhs ) const + { + 
return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( pMessageIdName == rhs.pMessageIdName ) + && ( messageIdNumber == rhs.messageIdNumber ) + && ( pMessage == rhs.pMessage ) + && ( queueLabelCount == rhs.queueLabelCount ) + && ( pQueueLabels == rhs.pQueueLabels ) + && ( cmdBufLabelCount == rhs.cmdBufLabelCount ) + && ( pCmdBufLabels == rhs.pCmdBufLabels ) + && ( objectCount == rhs.objectCount ) + && ( pObjects == rhs.pObjects ); + } + + bool operator!=( DebugUtilsMessengerCallbackDataEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eDebugUtilsMessengerCallbackDataEXT; + + public: + const void* pNext = nullptr; + DebugUtilsMessengerCallbackDataFlagsEXT flags; + const char* pMessageIdName; + int32_t messageIdNumber; + const char* pMessage; + uint32_t queueLabelCount; + DebugUtilsLabelEXT* pQueueLabels; + uint32_t cmdBufLabelCount; + DebugUtilsLabelEXT* pCmdBufLabels; + uint32_t objectCount; + DebugUtilsObjectNameInfoEXT* pObjects; + }; + static_assert( sizeof( DebugUtilsMessengerCallbackDataEXT ) == sizeof( VkDebugUtilsMessengerCallbackDataEXT ), "struct and wrapper have different size!" ); + + enum class QueueFlagBits + { + eGraphics = VK_QUEUE_GRAPHICS_BIT, + eCompute = VK_QUEUE_COMPUTE_BIT, + eTransfer = VK_QUEUE_TRANSFER_BIT, + eSparseBinding = VK_QUEUE_SPARSE_BINDING_BIT, + eProtected = VK_QUEUE_PROTECTED_BIT + }; + + using QueueFlags = Flags; + + VULKAN_HPP_INLINE QueueFlags operator|( QueueFlagBits bit0, QueueFlagBits bit1 ) + { + return QueueFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE QueueFlags operator~( QueueFlagBits bits ) + { + return ~( QueueFlags( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(QueueFlagBits::eGraphics) | VkFlags(QueueFlagBits::eCompute) | VkFlags(QueueFlagBits::eTransfer) | VkFlags(QueueFlagBits::eSparseBinding) | VkFlags(QueueFlagBits::eProtected) + }; + }; + + struct QueueFamilyProperties + { + operator VkQueueFamilyProperties const&() const + { + return *reinterpret_cast(this); + } + + operator VkQueueFamilyProperties &() + { + return *reinterpret_cast(this); + } + + bool operator==( QueueFamilyProperties const& rhs ) const + { + return ( queueFlags == rhs.queueFlags ) + && ( queueCount == rhs.queueCount ) + && ( timestampValidBits == rhs.timestampValidBits ) + && ( minImageTransferGranularity == rhs.minImageTransferGranularity ); + } + + bool operator!=( QueueFamilyProperties const& rhs ) const + { + return !operator==( rhs ); + } + + QueueFlags queueFlags; + uint32_t queueCount; + uint32_t timestampValidBits; + Extent3D minImageTransferGranularity; + }; + static_assert( sizeof( QueueFamilyProperties ) == sizeof( VkQueueFamilyProperties ), "struct and wrapper have different size!" 
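// --- Illustrative usage sketch (editor's note, not part of the patch) ---------
// The QueueFlags bitmask defined above supports the usual flag tests, so a
// graphics-capable queue family can be picked like this. The properties vector
// would come from the physical-device queue-family query generated elsewhere in
// this header.
inline uint32_t findGraphicsQueueFamily( std::vector<vk::QueueFamilyProperties> const & families )
{
  for ( uint32_t i = 0; i < static_cast<uint32_t>( families.size() ); ++i )
  {
    if ( families[i].queueFlags & vk::QueueFlagBits::eGraphics )   // Flags converts to bool in a condition
    {
      return i;
    }
  }
  return UINT32_MAX;   // no graphics-capable queue family found
}
// ------------------------------------------------------------------------------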
); + + struct QueueFamilyProperties2 + { + operator VkQueueFamilyProperties2 const&() const + { + return *reinterpret_cast(this); + } + + operator VkQueueFamilyProperties2 &() + { + return *reinterpret_cast(this); + } + + bool operator==( QueueFamilyProperties2 const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( queueFamilyProperties == rhs.queueFamilyProperties ); + } + + bool operator!=( QueueFamilyProperties2 const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eQueueFamilyProperties2; + + public: + void* pNext = nullptr; + QueueFamilyProperties queueFamilyProperties; + }; + static_assert( sizeof( QueueFamilyProperties2 ) == sizeof( VkQueueFamilyProperties2 ), "struct and wrapper have different size!" ); + + using QueueFamilyProperties2KHR = QueueFamilyProperties2; + + enum class DeviceQueueCreateFlagBits + { + eProtected = VK_DEVICE_QUEUE_CREATE_PROTECTED_BIT + }; + + using DeviceQueueCreateFlags = Flags; + + VULKAN_HPP_INLINE DeviceQueueCreateFlags operator|( DeviceQueueCreateFlagBits bit0, DeviceQueueCreateFlagBits bit1 ) + { + return DeviceQueueCreateFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE DeviceQueueCreateFlags operator~( DeviceQueueCreateFlagBits bits ) + { + return ~( DeviceQueueCreateFlags( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(DeviceQueueCreateFlagBits::eProtected) + }; + }; + + struct DeviceQueueCreateInfo + { + DeviceQueueCreateInfo( DeviceQueueCreateFlags flags_ = DeviceQueueCreateFlags(), + uint32_t queueFamilyIndex_ = 0, + uint32_t queueCount_ = 0, + const float* pQueuePriorities_ = nullptr ) + : flags( flags_ ) + , queueFamilyIndex( queueFamilyIndex_ ) + , queueCount( queueCount_ ) + , pQueuePriorities( pQueuePriorities_ ) + { + } + + DeviceQueueCreateInfo( VkDeviceQueueCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( DeviceQueueCreateInfo ) ); + } + + DeviceQueueCreateInfo& operator=( VkDeviceQueueCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( DeviceQueueCreateInfo ) ); + return *this; + } + DeviceQueueCreateInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + DeviceQueueCreateInfo& setFlags( DeviceQueueCreateFlags flags_ ) + { + flags = flags_; + return *this; + } + + DeviceQueueCreateInfo& setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) + { + queueFamilyIndex = queueFamilyIndex_; + return *this; + } + + DeviceQueueCreateInfo& setQueueCount( uint32_t queueCount_ ) + { + queueCount = queueCount_; + return *this; + } + + DeviceQueueCreateInfo& setPQueuePriorities( const float* pQueuePriorities_ ) + { + pQueuePriorities = pQueuePriorities_; + return *this; + } + + operator VkDeviceQueueCreateInfo const&() const + { + return *reinterpret_cast(this); + } + + operator VkDeviceQueueCreateInfo &() + { + return *reinterpret_cast(this); + } + + bool operator==( DeviceQueueCreateInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( queueFamilyIndex == rhs.queueFamilyIndex ) + && ( queueCount == rhs.queueCount ) + && ( pQueuePriorities == rhs.pQueuePriorities ); + } + + bool operator!=( DeviceQueueCreateInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eDeviceQueueCreateInfo; + + public: + const void* pNext = nullptr; + DeviceQueueCreateFlags flags; + uint32_t queueFamilyIndex; + uint32_t queueCount; + const float* pQueuePriorities; + }; + static_assert( 
sizeof( DeviceQueueCreateInfo ) == sizeof( VkDeviceQueueCreateInfo ), "struct and wrapper have different size!" ); + + struct DeviceCreateInfo + { + DeviceCreateInfo( DeviceCreateFlags flags_ = DeviceCreateFlags(), + uint32_t queueCreateInfoCount_ = 0, + const DeviceQueueCreateInfo* pQueueCreateInfos_ = nullptr, + uint32_t enabledLayerCount_ = 0, + const char* const* ppEnabledLayerNames_ = nullptr, + uint32_t enabledExtensionCount_ = 0, + const char* const* ppEnabledExtensionNames_ = nullptr, + const PhysicalDeviceFeatures* pEnabledFeatures_ = nullptr ) + : flags( flags_ ) + , queueCreateInfoCount( queueCreateInfoCount_ ) + , pQueueCreateInfos( pQueueCreateInfos_ ) + , enabledLayerCount( enabledLayerCount_ ) + , ppEnabledLayerNames( ppEnabledLayerNames_ ) + , enabledExtensionCount( enabledExtensionCount_ ) + , ppEnabledExtensionNames( ppEnabledExtensionNames_ ) + , pEnabledFeatures( pEnabledFeatures_ ) + { + } + + DeviceCreateInfo( VkDeviceCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( DeviceCreateInfo ) ); + } + + DeviceCreateInfo& operator=( VkDeviceCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( DeviceCreateInfo ) ); + return *this; + } + DeviceCreateInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + DeviceCreateInfo& setFlags( DeviceCreateFlags flags_ ) + { + flags = flags_; + return *this; + } + + DeviceCreateInfo& setQueueCreateInfoCount( uint32_t queueCreateInfoCount_ ) + { + queueCreateInfoCount = queueCreateInfoCount_; + return *this; + } + + DeviceCreateInfo& setPQueueCreateInfos( const DeviceQueueCreateInfo* pQueueCreateInfos_ ) + { + pQueueCreateInfos = pQueueCreateInfos_; + return *this; + } + + DeviceCreateInfo& setEnabledLayerCount( uint32_t enabledLayerCount_ ) + { + enabledLayerCount = enabledLayerCount_; + return *this; + } + + DeviceCreateInfo& setPpEnabledLayerNames( const char* const* ppEnabledLayerNames_ ) + { + ppEnabledLayerNames = ppEnabledLayerNames_; + return *this; + } + + DeviceCreateInfo& setEnabledExtensionCount( uint32_t enabledExtensionCount_ ) + { + enabledExtensionCount = enabledExtensionCount_; + return *this; + } + + DeviceCreateInfo& setPpEnabledExtensionNames( const char* const* ppEnabledExtensionNames_ ) + { + ppEnabledExtensionNames = ppEnabledExtensionNames_; + return *this; + } + + DeviceCreateInfo& setPEnabledFeatures( const PhysicalDeviceFeatures* pEnabledFeatures_ ) + { + pEnabledFeatures = pEnabledFeatures_; + return *this; + } + + operator VkDeviceCreateInfo const&() const + { + return *reinterpret_cast(this); + } + + operator VkDeviceCreateInfo &() + { + return *reinterpret_cast(this); + } + + bool operator==( DeviceCreateInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( queueCreateInfoCount == rhs.queueCreateInfoCount ) + && ( pQueueCreateInfos == rhs.pQueueCreateInfos ) + && ( enabledLayerCount == rhs.enabledLayerCount ) + && ( ppEnabledLayerNames == rhs.ppEnabledLayerNames ) + && ( enabledExtensionCount == rhs.enabledExtensionCount ) + && ( ppEnabledExtensionNames == rhs.ppEnabledExtensionNames ) + && ( pEnabledFeatures == rhs.pEnabledFeatures ); + } + + bool operator!=( DeviceCreateInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eDeviceCreateInfo; + + public: + const void* pNext = nullptr; + DeviceCreateFlags flags; + uint32_t queueCreateInfoCount; + const DeviceQueueCreateInfo* pQueueCreateInfos; + uint32_t enabledLayerCount; + const char* const* 
ppEnabledLayerNames; + uint32_t enabledExtensionCount; + const char* const* ppEnabledExtensionNames; + const PhysicalDeviceFeatures* pEnabledFeatures; + }; + static_assert( sizeof( DeviceCreateInfo ) == sizeof( VkDeviceCreateInfo ), "struct and wrapper have different size!" ); + + struct DeviceQueueInfo2 + { + DeviceQueueInfo2( DeviceQueueCreateFlags flags_ = DeviceQueueCreateFlags(), + uint32_t queueFamilyIndex_ = 0, + uint32_t queueIndex_ = 0 ) + : flags( flags_ ) + , queueFamilyIndex( queueFamilyIndex_ ) + , queueIndex( queueIndex_ ) + { + } + + DeviceQueueInfo2( VkDeviceQueueInfo2 const & rhs ) + { + memcpy( this, &rhs, sizeof( DeviceQueueInfo2 ) ); + } + + DeviceQueueInfo2& operator=( VkDeviceQueueInfo2 const & rhs ) + { + memcpy( this, &rhs, sizeof( DeviceQueueInfo2 ) ); + return *this; + } + DeviceQueueInfo2& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + DeviceQueueInfo2& setFlags( DeviceQueueCreateFlags flags_ ) + { + flags = flags_; + return *this; + } + + DeviceQueueInfo2& setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) + { + queueFamilyIndex = queueFamilyIndex_; + return *this; + } + + DeviceQueueInfo2& setQueueIndex( uint32_t queueIndex_ ) + { + queueIndex = queueIndex_; + return *this; + } + + operator VkDeviceQueueInfo2 const&() const + { + return *reinterpret_cast(this); + } + + operator VkDeviceQueueInfo2 &() + { + return *reinterpret_cast(this); + } + + bool operator==( DeviceQueueInfo2 const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( queueFamilyIndex == rhs.queueFamilyIndex ) + && ( queueIndex == rhs.queueIndex ); + } + + bool operator!=( DeviceQueueInfo2 const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eDeviceQueueInfo2; + + public: + const void* pNext = nullptr; + DeviceQueueCreateFlags flags; + uint32_t queueFamilyIndex; + uint32_t queueIndex; + }; + static_assert( sizeof( DeviceQueueInfo2 ) == sizeof( VkDeviceQueueInfo2 ), "struct and wrapper have different size!" 
); + + enum class MemoryPropertyFlagBits + { + eDeviceLocal = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT, + eHostVisible = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT, + eHostCoherent = VK_MEMORY_PROPERTY_HOST_COHERENT_BIT, + eHostCached = VK_MEMORY_PROPERTY_HOST_CACHED_BIT, + eLazilyAllocated = VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT, + eProtected = VK_MEMORY_PROPERTY_PROTECTED_BIT + }; + + using MemoryPropertyFlags = Flags; + + VULKAN_HPP_INLINE MemoryPropertyFlags operator|( MemoryPropertyFlagBits bit0, MemoryPropertyFlagBits bit1 ) + { + return MemoryPropertyFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE MemoryPropertyFlags operator~( MemoryPropertyFlagBits bits ) + { + return ~( MemoryPropertyFlags( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(MemoryPropertyFlagBits::eDeviceLocal) | VkFlags(MemoryPropertyFlagBits::eHostVisible) | VkFlags(MemoryPropertyFlagBits::eHostCoherent) | VkFlags(MemoryPropertyFlagBits::eHostCached) | VkFlags(MemoryPropertyFlagBits::eLazilyAllocated) | VkFlags(MemoryPropertyFlagBits::eProtected) + }; + }; + + struct MemoryType + { + operator VkMemoryType const&() const + { + return *reinterpret_cast(this); + } + + operator VkMemoryType &() + { + return *reinterpret_cast(this); + } + + bool operator==( MemoryType const& rhs ) const + { + return ( propertyFlags == rhs.propertyFlags ) + && ( heapIndex == rhs.heapIndex ); + } + + bool operator!=( MemoryType const& rhs ) const + { + return !operator==( rhs ); + } + + MemoryPropertyFlags propertyFlags; + uint32_t heapIndex; + }; + static_assert( sizeof( MemoryType ) == sizeof( VkMemoryType ), "struct and wrapper have different size!" ); + + enum class MemoryHeapFlagBits + { + eDeviceLocal = VK_MEMORY_HEAP_DEVICE_LOCAL_BIT, + eMultiInstance = VK_MEMORY_HEAP_MULTI_INSTANCE_BIT, + eMultiInstanceKHR = VK_MEMORY_HEAP_MULTI_INSTANCE_BIT + }; + + using MemoryHeapFlags = Flags; + + VULKAN_HPP_INLINE MemoryHeapFlags operator|( MemoryHeapFlagBits bit0, MemoryHeapFlagBits bit1 ) + { + return MemoryHeapFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE MemoryHeapFlags operator~( MemoryHeapFlagBits bits ) + { + return ~( MemoryHeapFlags( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(MemoryHeapFlagBits::eDeviceLocal) | VkFlags(MemoryHeapFlagBits::eMultiInstance) + }; + }; + + struct MemoryHeap + { + operator VkMemoryHeap const&() const + { + return *reinterpret_cast(this); + } + + operator VkMemoryHeap &() + { + return *reinterpret_cast(this); + } + + bool operator==( MemoryHeap const& rhs ) const + { + return ( size == rhs.size ) + && ( flags == rhs.flags ); + } + + bool operator!=( MemoryHeap const& rhs ) const + { + return !operator==( rhs ); + } + + DeviceSize size; + MemoryHeapFlags flags; + }; + static_assert( sizeof( MemoryHeap ) == sizeof( VkMemoryHeap ), "struct and wrapper have different size!" 
); + + struct PhysicalDeviceMemoryProperties + { + operator VkPhysicalDeviceMemoryProperties const&() const + { + return *reinterpret_cast<const VkPhysicalDeviceMemoryProperties*>(this); + } + + operator VkPhysicalDeviceMemoryProperties &() + { + return *reinterpret_cast<VkPhysicalDeviceMemoryProperties*>(this); + } + + bool operator==( PhysicalDeviceMemoryProperties const& rhs ) const + { + return ( memoryTypeCount == rhs.memoryTypeCount ) + && ( memcmp( memoryTypes, rhs.memoryTypes, VK_MAX_MEMORY_TYPES * sizeof( MemoryType ) ) == 0 ) + && ( memoryHeapCount == rhs.memoryHeapCount ) + && ( memcmp( memoryHeaps, rhs.memoryHeaps, VK_MAX_MEMORY_HEAPS * sizeof( MemoryHeap ) ) == 0 ); + } + + bool operator!=( PhysicalDeviceMemoryProperties const& rhs ) const + { + return !operator==( rhs ); + } + + uint32_t memoryTypeCount; + MemoryType memoryTypes[VK_MAX_MEMORY_TYPES]; + uint32_t memoryHeapCount; + MemoryHeap memoryHeaps[VK_MAX_MEMORY_HEAPS]; + }; + static_assert( sizeof( PhysicalDeviceMemoryProperties ) == sizeof( VkPhysicalDeviceMemoryProperties ), "struct and wrapper have different size!" ); + + struct PhysicalDeviceMemoryProperties2 + { + operator VkPhysicalDeviceMemoryProperties2 const&() const + { + return *reinterpret_cast<const VkPhysicalDeviceMemoryProperties2*>(this); + } + + operator VkPhysicalDeviceMemoryProperties2 &() + { + return *reinterpret_cast<VkPhysicalDeviceMemoryProperties2*>(this); + } + + bool operator==( PhysicalDeviceMemoryProperties2 const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( memoryProperties == rhs.memoryProperties ); + } + + bool operator!=( PhysicalDeviceMemoryProperties2 const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePhysicalDeviceMemoryProperties2; + + public: + void* pNext = nullptr; + PhysicalDeviceMemoryProperties memoryProperties; + }; + static_assert( sizeof( PhysicalDeviceMemoryProperties2 ) == sizeof( VkPhysicalDeviceMemoryProperties2 ), "struct and wrapper have different size!"
); + + using PhysicalDeviceMemoryProperties2KHR = PhysicalDeviceMemoryProperties2; + + enum class AccessFlagBits + { + eIndirectCommandRead = VK_ACCESS_INDIRECT_COMMAND_READ_BIT, + eIndexRead = VK_ACCESS_INDEX_READ_BIT, + eVertexAttributeRead = VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT, + eUniformRead = VK_ACCESS_UNIFORM_READ_BIT, + eInputAttachmentRead = VK_ACCESS_INPUT_ATTACHMENT_READ_BIT, + eShaderRead = VK_ACCESS_SHADER_READ_BIT, + eShaderWrite = VK_ACCESS_SHADER_WRITE_BIT, + eColorAttachmentRead = VK_ACCESS_COLOR_ATTACHMENT_READ_BIT, + eColorAttachmentWrite = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT, + eDepthStencilAttachmentRead = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT, + eDepthStencilAttachmentWrite = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT, + eTransferRead = VK_ACCESS_TRANSFER_READ_BIT, + eTransferWrite = VK_ACCESS_TRANSFER_WRITE_BIT, + eHostRead = VK_ACCESS_HOST_READ_BIT, + eHostWrite = VK_ACCESS_HOST_WRITE_BIT, + eMemoryRead = VK_ACCESS_MEMORY_READ_BIT, + eMemoryWrite = VK_ACCESS_MEMORY_WRITE_BIT, + eTransformFeedbackWriteEXT = VK_ACCESS_TRANSFORM_FEEDBACK_WRITE_BIT_EXT, + eTransformFeedbackCounterReadEXT = VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT, + eTransformFeedbackCounterWriteEXT = VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT, + eConditionalRenderingReadEXT = VK_ACCESS_CONDITIONAL_RENDERING_READ_BIT_EXT, + eCommandProcessReadNVX = VK_ACCESS_COMMAND_PROCESS_READ_BIT_NVX, + eCommandProcessWriteNVX = VK_ACCESS_COMMAND_PROCESS_WRITE_BIT_NVX, + eColorAttachmentReadNoncoherentEXT = VK_ACCESS_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT, + eShadingRateImageReadNV = VK_ACCESS_SHADING_RATE_IMAGE_READ_BIT_NV, + eAccelerationStructureReadNV = VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_NV, + eAccelerationStructureWriteNV = VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_NV + }; + + using AccessFlags = Flags; + + VULKAN_HPP_INLINE AccessFlags operator|( AccessFlagBits bit0, AccessFlagBits bit1 ) + { + return AccessFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE AccessFlags operator~( AccessFlagBits bits ) + { + return ~( AccessFlags( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(AccessFlagBits::eIndirectCommandRead) | VkFlags(AccessFlagBits::eIndexRead) | VkFlags(AccessFlagBits::eVertexAttributeRead) | VkFlags(AccessFlagBits::eUniformRead) | VkFlags(AccessFlagBits::eInputAttachmentRead) | VkFlags(AccessFlagBits::eShaderRead) | VkFlags(AccessFlagBits::eShaderWrite) | VkFlags(AccessFlagBits::eColorAttachmentRead) | VkFlags(AccessFlagBits::eColorAttachmentWrite) | VkFlags(AccessFlagBits::eDepthStencilAttachmentRead) | VkFlags(AccessFlagBits::eDepthStencilAttachmentWrite) | VkFlags(AccessFlagBits::eTransferRead) | VkFlags(AccessFlagBits::eTransferWrite) | VkFlags(AccessFlagBits::eHostRead) | VkFlags(AccessFlagBits::eHostWrite) | VkFlags(AccessFlagBits::eMemoryRead) | VkFlags(AccessFlagBits::eMemoryWrite) | VkFlags(AccessFlagBits::eTransformFeedbackWriteEXT) | VkFlags(AccessFlagBits::eTransformFeedbackCounterReadEXT) | VkFlags(AccessFlagBits::eTransformFeedbackCounterWriteEXT) | VkFlags(AccessFlagBits::eConditionalRenderingReadEXT) | VkFlags(AccessFlagBits::eCommandProcessReadNVX) | VkFlags(AccessFlagBits::eCommandProcessWriteNVX) | VkFlags(AccessFlagBits::eColorAttachmentReadNoncoherentEXT) | VkFlags(AccessFlagBits::eShadingRateImageReadNV) | VkFlags(AccessFlagBits::eAccelerationStructureReadNV) | VkFlags(AccessFlagBits::eAccelerationStructureWriteNV) + }; + }; + + struct MemoryBarrier + { + MemoryBarrier( AccessFlags srcAccessMask_ = AccessFlags(), 
+ AccessFlags dstAccessMask_ = AccessFlags() ) + : srcAccessMask( srcAccessMask_ ) + , dstAccessMask( dstAccessMask_ ) + { + } + + MemoryBarrier( VkMemoryBarrier const & rhs ) + { + memcpy( this, &rhs, sizeof( MemoryBarrier ) ); + } + + MemoryBarrier& operator=( VkMemoryBarrier const & rhs ) + { + memcpy( this, &rhs, sizeof( MemoryBarrier ) ); + return *this; + } + MemoryBarrier& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + MemoryBarrier& setSrcAccessMask( AccessFlags srcAccessMask_ ) + { + srcAccessMask = srcAccessMask_; + return *this; + } + + MemoryBarrier& setDstAccessMask( AccessFlags dstAccessMask_ ) + { + dstAccessMask = dstAccessMask_; + return *this; + } + + operator VkMemoryBarrier const&() const + { + return *reinterpret_cast(this); + } + + operator VkMemoryBarrier &() + { + return *reinterpret_cast(this); + } + + bool operator==( MemoryBarrier const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( srcAccessMask == rhs.srcAccessMask ) + && ( dstAccessMask == rhs.dstAccessMask ); + } + + bool operator!=( MemoryBarrier const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eMemoryBarrier; + + public: + const void* pNext = nullptr; + AccessFlags srcAccessMask; + AccessFlags dstAccessMask; + }; + static_assert( sizeof( MemoryBarrier ) == sizeof( VkMemoryBarrier ), "struct and wrapper have different size!" ); + + struct BufferMemoryBarrier + { + BufferMemoryBarrier( AccessFlags srcAccessMask_ = AccessFlags(), + AccessFlags dstAccessMask_ = AccessFlags(), + uint32_t srcQueueFamilyIndex_ = 0, + uint32_t dstQueueFamilyIndex_ = 0, + Buffer buffer_ = Buffer(), + DeviceSize offset_ = 0, + DeviceSize size_ = 0 ) + : srcAccessMask( srcAccessMask_ ) + , dstAccessMask( dstAccessMask_ ) + , srcQueueFamilyIndex( srcQueueFamilyIndex_ ) + , dstQueueFamilyIndex( dstQueueFamilyIndex_ ) + , buffer( buffer_ ) + , offset( offset_ ) + , size( size_ ) + { + } + + BufferMemoryBarrier( VkBufferMemoryBarrier const & rhs ) + { + memcpy( this, &rhs, sizeof( BufferMemoryBarrier ) ); + } + + BufferMemoryBarrier& operator=( VkBufferMemoryBarrier const & rhs ) + { + memcpy( this, &rhs, sizeof( BufferMemoryBarrier ) ); + return *this; + } + BufferMemoryBarrier& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + BufferMemoryBarrier& setSrcAccessMask( AccessFlags srcAccessMask_ ) + { + srcAccessMask = srcAccessMask_; + return *this; + } + + BufferMemoryBarrier& setDstAccessMask( AccessFlags dstAccessMask_ ) + { + dstAccessMask = dstAccessMask_; + return *this; + } + + BufferMemoryBarrier& setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ ) + { + srcQueueFamilyIndex = srcQueueFamilyIndex_; + return *this; + } + + BufferMemoryBarrier& setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ ) + { + dstQueueFamilyIndex = dstQueueFamilyIndex_; + return *this; + } + + BufferMemoryBarrier& setBuffer( Buffer buffer_ ) + { + buffer = buffer_; + return *this; + } + + BufferMemoryBarrier& setOffset( DeviceSize offset_ ) + { + offset = offset_; + return *this; + } + + BufferMemoryBarrier& setSize( DeviceSize size_ ) + { + size = size_; + return *this; + } + + operator VkBufferMemoryBarrier const&() const + { + return *reinterpret_cast(this); + } + + operator VkBufferMemoryBarrier &() + { + return *reinterpret_cast(this); + } + + bool operator==( BufferMemoryBarrier const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( srcAccessMask == 
rhs.srcAccessMask ) + && ( dstAccessMask == rhs.dstAccessMask ) + && ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex ) + && ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex ) + && ( buffer == rhs.buffer ) + && ( offset == rhs.offset ) + && ( size == rhs.size ); + } + + bool operator!=( BufferMemoryBarrier const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eBufferMemoryBarrier; + + public: + const void* pNext = nullptr; + AccessFlags srcAccessMask; + AccessFlags dstAccessMask; + uint32_t srcQueueFamilyIndex; + uint32_t dstQueueFamilyIndex; + Buffer buffer; + DeviceSize offset; + DeviceSize size; + }; + static_assert( sizeof( BufferMemoryBarrier ) == sizeof( VkBufferMemoryBarrier ), "struct and wrapper have different size!" ); + + enum class BufferUsageFlagBits + { + eTransferSrc = VK_BUFFER_USAGE_TRANSFER_SRC_BIT, + eTransferDst = VK_BUFFER_USAGE_TRANSFER_DST_BIT, + eUniformTexelBuffer = VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT, + eStorageTexelBuffer = VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT, + eUniformBuffer = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT, + eStorageBuffer = VK_BUFFER_USAGE_STORAGE_BUFFER_BIT, + eIndexBuffer = VK_BUFFER_USAGE_INDEX_BUFFER_BIT, + eVertexBuffer = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT, + eIndirectBuffer = VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT, + eTransformFeedbackBufferEXT = VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_BUFFER_BIT_EXT, + eTransformFeedbackCounterBufferEXT = VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_EXT, + eConditionalRenderingEXT = VK_BUFFER_USAGE_CONDITIONAL_RENDERING_BIT_EXT, + eRayTracingNV = VK_BUFFER_USAGE_RAY_TRACING_BIT_NV + }; + + using BufferUsageFlags = Flags; + + VULKAN_HPP_INLINE BufferUsageFlags operator|( BufferUsageFlagBits bit0, BufferUsageFlagBits bit1 ) + { + return BufferUsageFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE BufferUsageFlags operator~( BufferUsageFlagBits bits ) + { + return ~( BufferUsageFlags( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(BufferUsageFlagBits::eTransferSrc) | VkFlags(BufferUsageFlagBits::eTransferDst) | VkFlags(BufferUsageFlagBits::eUniformTexelBuffer) | VkFlags(BufferUsageFlagBits::eStorageTexelBuffer) | VkFlags(BufferUsageFlagBits::eUniformBuffer) | VkFlags(BufferUsageFlagBits::eStorageBuffer) | VkFlags(BufferUsageFlagBits::eIndexBuffer) | VkFlags(BufferUsageFlagBits::eVertexBuffer) | VkFlags(BufferUsageFlagBits::eIndirectBuffer) | VkFlags(BufferUsageFlagBits::eTransformFeedbackBufferEXT) | VkFlags(BufferUsageFlagBits::eTransformFeedbackCounterBufferEXT) | VkFlags(BufferUsageFlagBits::eConditionalRenderingEXT) | VkFlags(BufferUsageFlagBits::eRayTracingNV) + }; + }; + + enum class BufferCreateFlagBits + { + eSparseBinding = VK_BUFFER_CREATE_SPARSE_BINDING_BIT, + eSparseResidency = VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT, + eSparseAliased = VK_BUFFER_CREATE_SPARSE_ALIASED_BIT, + eProtected = VK_BUFFER_CREATE_PROTECTED_BIT + }; + + using BufferCreateFlags = Flags; + + VULKAN_HPP_INLINE BufferCreateFlags operator|( BufferCreateFlagBits bit0, BufferCreateFlagBits bit1 ) + { + return BufferCreateFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE BufferCreateFlags operator~( BufferCreateFlagBits bits ) + { + return ~( BufferCreateFlags( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(BufferCreateFlagBits::eSparseBinding) | VkFlags(BufferCreateFlagBits::eSparseResidency) | VkFlags(BufferCreateFlagBits::eSparseAliased) | VkFlags(BufferCreateFlagBits::eProtected) + }; + }; + + struct 
BufferCreateInfo + { + BufferCreateInfo( BufferCreateFlags flags_ = BufferCreateFlags(), + DeviceSize size_ = 0, + BufferUsageFlags usage_ = BufferUsageFlags(), + SharingMode sharingMode_ = SharingMode::eExclusive, + uint32_t queueFamilyIndexCount_ = 0, + const uint32_t* pQueueFamilyIndices_ = nullptr ) + : flags( flags_ ) + , size( size_ ) + , usage( usage_ ) + , sharingMode( sharingMode_ ) + , queueFamilyIndexCount( queueFamilyIndexCount_ ) + , pQueueFamilyIndices( pQueueFamilyIndices_ ) + { + } + + BufferCreateInfo( VkBufferCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( BufferCreateInfo ) ); + } + + BufferCreateInfo& operator=( VkBufferCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( BufferCreateInfo ) ); + return *this; + } + BufferCreateInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + BufferCreateInfo& setFlags( BufferCreateFlags flags_ ) + { + flags = flags_; + return *this; + } + + BufferCreateInfo& setSize( DeviceSize size_ ) + { + size = size_; + return *this; + } + + BufferCreateInfo& setUsage( BufferUsageFlags usage_ ) + { + usage = usage_; + return *this; + } + + BufferCreateInfo& setSharingMode( SharingMode sharingMode_ ) + { + sharingMode = sharingMode_; + return *this; + } + + BufferCreateInfo& setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) + { + queueFamilyIndexCount = queueFamilyIndexCount_; + return *this; + } + + BufferCreateInfo& setPQueueFamilyIndices( const uint32_t* pQueueFamilyIndices_ ) + { + pQueueFamilyIndices = pQueueFamilyIndices_; + return *this; + } + + operator VkBufferCreateInfo const&() const + { + return *reinterpret_cast(this); + } + + operator VkBufferCreateInfo &() + { + return *reinterpret_cast(this); + } + + bool operator==( BufferCreateInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( size == rhs.size ) + && ( usage == rhs.usage ) + && ( sharingMode == rhs.sharingMode ) + && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount ) + && ( pQueueFamilyIndices == rhs.pQueueFamilyIndices ); + } + + bool operator!=( BufferCreateInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eBufferCreateInfo; + + public: + const void* pNext = nullptr; + BufferCreateFlags flags; + DeviceSize size; + BufferUsageFlags usage; + SharingMode sharingMode; + uint32_t queueFamilyIndexCount; + const uint32_t* pQueueFamilyIndices; + }; + static_assert( sizeof( BufferCreateInfo ) == sizeof( VkBufferCreateInfo ), "struct and wrapper have different size!" 
); + + enum class ShaderStageFlagBits + { + eVertex = VK_SHADER_STAGE_VERTEX_BIT, + eTessellationControl = VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT, + eTessellationEvaluation = VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT, + eGeometry = VK_SHADER_STAGE_GEOMETRY_BIT, + eFragment = VK_SHADER_STAGE_FRAGMENT_BIT, + eCompute = VK_SHADER_STAGE_COMPUTE_BIT, + eAllGraphics = VK_SHADER_STAGE_ALL_GRAPHICS, + eAll = VK_SHADER_STAGE_ALL, + eRaygenNV = VK_SHADER_STAGE_RAYGEN_BIT_NV, + eAnyHitNV = VK_SHADER_STAGE_ANY_HIT_BIT_NV, + eClosestHitNV = VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV, + eMissNV = VK_SHADER_STAGE_MISS_BIT_NV, + eIntersectionNV = VK_SHADER_STAGE_INTERSECTION_BIT_NV, + eCallableNV = VK_SHADER_STAGE_CALLABLE_BIT_NV, + eTaskNV = VK_SHADER_STAGE_TASK_BIT_NV, + eMeshNV = VK_SHADER_STAGE_MESH_BIT_NV + }; + + using ShaderStageFlags = Flags; + + VULKAN_HPP_INLINE ShaderStageFlags operator|( ShaderStageFlagBits bit0, ShaderStageFlagBits bit1 ) + { + return ShaderStageFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE ShaderStageFlags operator~( ShaderStageFlagBits bits ) + { + return ~( ShaderStageFlags( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(ShaderStageFlagBits::eVertex) | VkFlags(ShaderStageFlagBits::eTessellationControl) | VkFlags(ShaderStageFlagBits::eTessellationEvaluation) | VkFlags(ShaderStageFlagBits::eGeometry) | VkFlags(ShaderStageFlagBits::eFragment) | VkFlags(ShaderStageFlagBits::eCompute) | VkFlags(ShaderStageFlagBits::eAllGraphics) | VkFlags(ShaderStageFlagBits::eAll) | VkFlags(ShaderStageFlagBits::eRaygenNV) | VkFlags(ShaderStageFlagBits::eAnyHitNV) | VkFlags(ShaderStageFlagBits::eClosestHitNV) | VkFlags(ShaderStageFlagBits::eMissNV) | VkFlags(ShaderStageFlagBits::eIntersectionNV) | VkFlags(ShaderStageFlagBits::eCallableNV) | VkFlags(ShaderStageFlagBits::eTaskNV) | VkFlags(ShaderStageFlagBits::eMeshNV) + }; + }; + + struct DescriptorSetLayoutBinding + { + DescriptorSetLayoutBinding( uint32_t binding_ = 0, + DescriptorType descriptorType_ = DescriptorType::eSampler, + uint32_t descriptorCount_ = 0, + ShaderStageFlags stageFlags_ = ShaderStageFlags(), + const Sampler* pImmutableSamplers_ = nullptr ) + : binding( binding_ ) + , descriptorType( descriptorType_ ) + , descriptorCount( descriptorCount_ ) + , stageFlags( stageFlags_ ) + , pImmutableSamplers( pImmutableSamplers_ ) + { + } + + DescriptorSetLayoutBinding( VkDescriptorSetLayoutBinding const & rhs ) + { + memcpy( this, &rhs, sizeof( DescriptorSetLayoutBinding ) ); + } + + DescriptorSetLayoutBinding& operator=( VkDescriptorSetLayoutBinding const & rhs ) + { + memcpy( this, &rhs, sizeof( DescriptorSetLayoutBinding ) ); + return *this; + } + DescriptorSetLayoutBinding& setBinding( uint32_t binding_ ) + { + binding = binding_; + return *this; + } + + DescriptorSetLayoutBinding& setDescriptorType( DescriptorType descriptorType_ ) + { + descriptorType = descriptorType_; + return *this; + } + + DescriptorSetLayoutBinding& setDescriptorCount( uint32_t descriptorCount_ ) + { + descriptorCount = descriptorCount_; + return *this; + } + + DescriptorSetLayoutBinding& setStageFlags( ShaderStageFlags stageFlags_ ) + { + stageFlags = stageFlags_; + return *this; + } + + DescriptorSetLayoutBinding& setPImmutableSamplers( const Sampler* pImmutableSamplers_ ) + { + pImmutableSamplers = pImmutableSamplers_; + return *this; + } + + operator VkDescriptorSetLayoutBinding const&() const + { + return *reinterpret_cast(this); + } + + operator VkDescriptorSetLayoutBinding &() + { + return *reinterpret_cast(this); + } 
+ + bool operator==( DescriptorSetLayoutBinding const& rhs ) const + { + return ( binding == rhs.binding ) + && ( descriptorType == rhs.descriptorType ) + && ( descriptorCount == rhs.descriptorCount ) + && ( stageFlags == rhs.stageFlags ) + && ( pImmutableSamplers == rhs.pImmutableSamplers ); + } + + bool operator!=( DescriptorSetLayoutBinding const& rhs ) const + { + return !operator==( rhs ); + } + + uint32_t binding; + DescriptorType descriptorType; + uint32_t descriptorCount; + ShaderStageFlags stageFlags; + const Sampler* pImmutableSamplers; + }; + static_assert( sizeof( DescriptorSetLayoutBinding ) == sizeof( VkDescriptorSetLayoutBinding ), "struct and wrapper have different size!" ); + + struct PipelineShaderStageCreateInfo + { + PipelineShaderStageCreateInfo( PipelineShaderStageCreateFlags flags_ = PipelineShaderStageCreateFlags(), + ShaderStageFlagBits stage_ = ShaderStageFlagBits::eVertex, + ShaderModule module_ = ShaderModule(), + const char* pName_ = nullptr, + const SpecializationInfo* pSpecializationInfo_ = nullptr ) + : flags( flags_ ) + , stage( stage_ ) + , module( module_ ) + , pName( pName_ ) + , pSpecializationInfo( pSpecializationInfo_ ) + { + } + + PipelineShaderStageCreateInfo( VkPipelineShaderStageCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineShaderStageCreateInfo ) ); + } + + PipelineShaderStageCreateInfo& operator=( VkPipelineShaderStageCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineShaderStageCreateInfo ) ); + return *this; + } + PipelineShaderStageCreateInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PipelineShaderStageCreateInfo& setFlags( PipelineShaderStageCreateFlags flags_ ) + { + flags = flags_; + return *this; + } + + PipelineShaderStageCreateInfo& setStage( ShaderStageFlagBits stage_ ) + { + stage = stage_; + return *this; + } + + PipelineShaderStageCreateInfo& setModule( ShaderModule module_ ) + { + module = module_; + return *this; + } + + PipelineShaderStageCreateInfo& setPName( const char* pName_ ) + { + pName = pName_; + return *this; + } + + PipelineShaderStageCreateInfo& setPSpecializationInfo( const SpecializationInfo* pSpecializationInfo_ ) + { + pSpecializationInfo = pSpecializationInfo_; + return *this; + } + + operator VkPipelineShaderStageCreateInfo const&() const + { + return *reinterpret_cast(this); + } + + operator VkPipelineShaderStageCreateInfo &() + { + return *reinterpret_cast(this); + } + + bool operator==( PipelineShaderStageCreateInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( stage == rhs.stage ) + && ( module == rhs.module ) + && ( pName == rhs.pName ) + && ( pSpecializationInfo == rhs.pSpecializationInfo ); + } + + bool operator!=( PipelineShaderStageCreateInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePipelineShaderStageCreateInfo; + + public: + const void* pNext = nullptr; + PipelineShaderStageCreateFlags flags; + ShaderStageFlagBits stage; + ShaderModule module; + const char* pName; + const SpecializationInfo* pSpecializationInfo; + }; + static_assert( sizeof( PipelineShaderStageCreateInfo ) == sizeof( VkPipelineShaderStageCreateInfo ), "struct and wrapper have different size!" 
); + + struct PushConstantRange + { + PushConstantRange( ShaderStageFlags stageFlags_ = ShaderStageFlags(), + uint32_t offset_ = 0, + uint32_t size_ = 0 ) + : stageFlags( stageFlags_ ) + , offset( offset_ ) + , size( size_ ) + { + } + + PushConstantRange( VkPushConstantRange const & rhs ) + { + memcpy( this, &rhs, sizeof( PushConstantRange ) ); + } + + PushConstantRange& operator=( VkPushConstantRange const & rhs ) + { + memcpy( this, &rhs, sizeof( PushConstantRange ) ); + return *this; + } + PushConstantRange& setStageFlags( ShaderStageFlags stageFlags_ ) + { + stageFlags = stageFlags_; + return *this; + } + + PushConstantRange& setOffset( uint32_t offset_ ) + { + offset = offset_; + return *this; + } + + PushConstantRange& setSize( uint32_t size_ ) + { + size = size_; + return *this; + } + + operator VkPushConstantRange const&() const + { + return *reinterpret_cast(this); + } + + operator VkPushConstantRange &() + { + return *reinterpret_cast(this); + } + + bool operator==( PushConstantRange const& rhs ) const + { + return ( stageFlags == rhs.stageFlags ) + && ( offset == rhs.offset ) + && ( size == rhs.size ); + } + + bool operator!=( PushConstantRange const& rhs ) const + { + return !operator==( rhs ); + } + + ShaderStageFlags stageFlags; + uint32_t offset; + uint32_t size; + }; + static_assert( sizeof( PushConstantRange ) == sizeof( VkPushConstantRange ), "struct and wrapper have different size!" ); + + struct PipelineLayoutCreateInfo + { + PipelineLayoutCreateInfo( PipelineLayoutCreateFlags flags_ = PipelineLayoutCreateFlags(), + uint32_t setLayoutCount_ = 0, + const DescriptorSetLayout* pSetLayouts_ = nullptr, + uint32_t pushConstantRangeCount_ = 0, + const PushConstantRange* pPushConstantRanges_ = nullptr ) + : flags( flags_ ) + , setLayoutCount( setLayoutCount_ ) + , pSetLayouts( pSetLayouts_ ) + , pushConstantRangeCount( pushConstantRangeCount_ ) + , pPushConstantRanges( pPushConstantRanges_ ) + { + } + + PipelineLayoutCreateInfo( VkPipelineLayoutCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineLayoutCreateInfo ) ); + } + + PipelineLayoutCreateInfo& operator=( VkPipelineLayoutCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineLayoutCreateInfo ) ); + return *this; + } + PipelineLayoutCreateInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PipelineLayoutCreateInfo& setFlags( PipelineLayoutCreateFlags flags_ ) + { + flags = flags_; + return *this; + } + + PipelineLayoutCreateInfo& setSetLayoutCount( uint32_t setLayoutCount_ ) + { + setLayoutCount = setLayoutCount_; + return *this; + } + + PipelineLayoutCreateInfo& setPSetLayouts( const DescriptorSetLayout* pSetLayouts_ ) + { + pSetLayouts = pSetLayouts_; + return *this; + } + + PipelineLayoutCreateInfo& setPushConstantRangeCount( uint32_t pushConstantRangeCount_ ) + { + pushConstantRangeCount = pushConstantRangeCount_; + return *this; + } + + PipelineLayoutCreateInfo& setPPushConstantRanges( const PushConstantRange* pPushConstantRanges_ ) + { + pPushConstantRanges = pPushConstantRanges_; + return *this; + } + + operator VkPipelineLayoutCreateInfo const&() const + { + return *reinterpret_cast(this); + } + + operator VkPipelineLayoutCreateInfo &() + { + return *reinterpret_cast(this); + } + + bool operator==( PipelineLayoutCreateInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( setLayoutCount == rhs.setLayoutCount ) + && ( pSetLayouts == rhs.pSetLayouts ) + && ( pushConstantRangeCount == 
rhs.pushConstantRangeCount ) + && ( pPushConstantRanges == rhs.pPushConstantRanges ); + } + + bool operator!=( PipelineLayoutCreateInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePipelineLayoutCreateInfo; + + public: + const void* pNext = nullptr; + PipelineLayoutCreateFlags flags; + uint32_t setLayoutCount; + const DescriptorSetLayout* pSetLayouts; + uint32_t pushConstantRangeCount; + const PushConstantRange* pPushConstantRanges; + }; + static_assert( sizeof( PipelineLayoutCreateInfo ) == sizeof( VkPipelineLayoutCreateInfo ), "struct and wrapper have different size!" ); + + struct ShaderStatisticsInfoAMD + { + operator VkShaderStatisticsInfoAMD const&() const + { + return *reinterpret_cast<const VkShaderStatisticsInfoAMD*>(this); + } + + operator VkShaderStatisticsInfoAMD &() + { + return *reinterpret_cast<VkShaderStatisticsInfoAMD*>(this); + } + + bool operator==( ShaderStatisticsInfoAMD const& rhs ) const + { + return ( shaderStageMask == rhs.shaderStageMask ) + && ( resourceUsage == rhs.resourceUsage ) + && ( numPhysicalVgprs == rhs.numPhysicalVgprs ) + && ( numPhysicalSgprs == rhs.numPhysicalSgprs ) + && ( numAvailableVgprs == rhs.numAvailableVgprs ) + && ( numAvailableSgprs == rhs.numAvailableSgprs ) + && ( memcmp( computeWorkGroupSize, rhs.computeWorkGroupSize, 3 * sizeof( uint32_t ) ) == 0 ); + } + + bool operator!=( ShaderStatisticsInfoAMD const& rhs ) const + { + return !operator==( rhs ); + } + + ShaderStageFlags shaderStageMask; + ShaderResourceUsageAMD resourceUsage; + uint32_t numPhysicalVgprs; + uint32_t numPhysicalSgprs; + uint32_t numAvailableVgprs; + uint32_t numAvailableSgprs; + uint32_t computeWorkGroupSize[3]; + }; + static_assert( sizeof( ShaderStatisticsInfoAMD ) == sizeof( VkShaderStatisticsInfoAMD ), "struct and wrapper have different size!"
); + + enum class ImageUsageFlagBits + { + eTransferSrc = VK_IMAGE_USAGE_TRANSFER_SRC_BIT, + eTransferDst = VK_IMAGE_USAGE_TRANSFER_DST_BIT, + eSampled = VK_IMAGE_USAGE_SAMPLED_BIT, + eStorage = VK_IMAGE_USAGE_STORAGE_BIT, + eColorAttachment = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, + eDepthStencilAttachment = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, + eTransientAttachment = VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT, + eInputAttachment = VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT, + eShadingRateImageNV = VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV + }; + + using ImageUsageFlags = Flags<ImageUsageFlagBits, VkImageUsageFlags>; + + VULKAN_HPP_INLINE ImageUsageFlags operator|( ImageUsageFlagBits bit0, ImageUsageFlagBits bit1 ) + { + return ImageUsageFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE ImageUsageFlags operator~( ImageUsageFlagBits bits ) + { + return ~( ImageUsageFlags( bits ) ); + } + + template <> struct FlagTraits<ImageUsageFlagBits> + { + enum + { + allFlags = VkFlags(ImageUsageFlagBits::eTransferSrc) | VkFlags(ImageUsageFlagBits::eTransferDst) | VkFlags(ImageUsageFlagBits::eSampled) | VkFlags(ImageUsageFlagBits::eStorage) | VkFlags(ImageUsageFlagBits::eColorAttachment) | VkFlags(ImageUsageFlagBits::eDepthStencilAttachment) | VkFlags(ImageUsageFlagBits::eTransientAttachment) | VkFlags(ImageUsageFlagBits::eInputAttachment) | VkFlags(ImageUsageFlagBits::eShadingRateImageNV) + }; + }; + + struct SharedPresentSurfaceCapabilitiesKHR + { + operator VkSharedPresentSurfaceCapabilitiesKHR const&() const + { + return *reinterpret_cast<const VkSharedPresentSurfaceCapabilitiesKHR*>(this); + } + + operator VkSharedPresentSurfaceCapabilitiesKHR &() + { + return *reinterpret_cast<VkSharedPresentSurfaceCapabilitiesKHR*>(this); + } + + bool operator==( SharedPresentSurfaceCapabilitiesKHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( sharedPresentSupportedUsageFlags == rhs.sharedPresentSupportedUsageFlags ); + } + + bool operator!=( SharedPresentSurfaceCapabilitiesKHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eSharedPresentSurfaceCapabilitiesKHR; + + public: + void* pNext = nullptr; + ImageUsageFlags sharedPresentSupportedUsageFlags; + }; + static_assert( sizeof( SharedPresentSurfaceCapabilitiesKHR ) == sizeof( VkSharedPresentSurfaceCapabilitiesKHR ), "struct and wrapper have different size!"
); + + struct ImageViewUsageCreateInfo + { + ImageViewUsageCreateInfo( ImageUsageFlags usage_ = ImageUsageFlags() ) + : usage( usage_ ) + { + } + + ImageViewUsageCreateInfo( VkImageViewUsageCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( ImageViewUsageCreateInfo ) ); + } + + ImageViewUsageCreateInfo& operator=( VkImageViewUsageCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( ImageViewUsageCreateInfo ) ); + return *this; + } + ImageViewUsageCreateInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + ImageViewUsageCreateInfo& setUsage( ImageUsageFlags usage_ ) + { + usage = usage_; + return *this; + } + + operator VkImageViewUsageCreateInfo const&() const + { + return *reinterpret_cast(this); + } + + operator VkImageViewUsageCreateInfo &() + { + return *reinterpret_cast(this); + } + + bool operator==( ImageViewUsageCreateInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( usage == rhs.usage ); + } + + bool operator!=( ImageViewUsageCreateInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eImageViewUsageCreateInfo; + + public: + const void* pNext = nullptr; + ImageUsageFlags usage; + }; + static_assert( sizeof( ImageViewUsageCreateInfo ) == sizeof( VkImageViewUsageCreateInfo ), "struct and wrapper have different size!" ); + + using ImageViewUsageCreateInfoKHR = ImageViewUsageCreateInfo; + + enum class ImageCreateFlagBits + { + eSparseBinding = VK_IMAGE_CREATE_SPARSE_BINDING_BIT, + eSparseResidency = VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT, + eSparseAliased = VK_IMAGE_CREATE_SPARSE_ALIASED_BIT, + eMutableFormat = VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT, + eCubeCompatible = VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT, + eAlias = VK_IMAGE_CREATE_ALIAS_BIT, + eAliasKHR = VK_IMAGE_CREATE_ALIAS_BIT, + eSplitInstanceBindRegions = VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT, + eSplitInstanceBindRegionsKHR = VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT, + e2DArrayCompatible = VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT, + e2DArrayCompatibleKHR = VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT, + eBlockTexelViewCompatible = VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT, + eBlockTexelViewCompatibleKHR = VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT, + eExtendedUsage = VK_IMAGE_CREATE_EXTENDED_USAGE_BIT, + eExtendedUsageKHR = VK_IMAGE_CREATE_EXTENDED_USAGE_BIT, + eProtected = VK_IMAGE_CREATE_PROTECTED_BIT, + eDisjoint = VK_IMAGE_CREATE_DISJOINT_BIT, + eDisjointKHR = VK_IMAGE_CREATE_DISJOINT_BIT, + eCornerSampledNV = VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV, + eSampleLocationsCompatibleDepthEXT = VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT + }; + + using ImageCreateFlags = Flags; + + VULKAN_HPP_INLINE ImageCreateFlags operator|( ImageCreateFlagBits bit0, ImageCreateFlagBits bit1 ) + { + return ImageCreateFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE ImageCreateFlags operator~( ImageCreateFlagBits bits ) + { + return ~( ImageCreateFlags( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(ImageCreateFlagBits::eSparseBinding) | VkFlags(ImageCreateFlagBits::eSparseResidency) | VkFlags(ImageCreateFlagBits::eSparseAliased) | VkFlags(ImageCreateFlagBits::eMutableFormat) | VkFlags(ImageCreateFlagBits::eCubeCompatible) | VkFlags(ImageCreateFlagBits::eAlias) | VkFlags(ImageCreateFlagBits::eSplitInstanceBindRegions) | VkFlags(ImageCreateFlagBits::e2DArrayCompatible) | VkFlags(ImageCreateFlagBits::eBlockTexelViewCompatible) | 
VkFlags(ImageCreateFlagBits::eExtendedUsage) | VkFlags(ImageCreateFlagBits::eProtected) | VkFlags(ImageCreateFlagBits::eDisjoint) | VkFlags(ImageCreateFlagBits::eCornerSampledNV) | VkFlags(ImageCreateFlagBits::eSampleLocationsCompatibleDepthEXT) + }; + }; + + struct PhysicalDeviceImageFormatInfo2 + { + PhysicalDeviceImageFormatInfo2( Format format_ = Format::eUndefined, + ImageType type_ = ImageType::e1D, + ImageTiling tiling_ = ImageTiling::eOptimal, + ImageUsageFlags usage_ = ImageUsageFlags(), + ImageCreateFlags flags_ = ImageCreateFlags() ) + : format( format_ ) + , type( type_ ) + , tiling( tiling_ ) + , usage( usage_ ) + , flags( flags_ ) + { + } + + PhysicalDeviceImageFormatInfo2( VkPhysicalDeviceImageFormatInfo2 const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceImageFormatInfo2 ) ); + } + + PhysicalDeviceImageFormatInfo2& operator=( VkPhysicalDeviceImageFormatInfo2 const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceImageFormatInfo2 ) ); + return *this; + } + PhysicalDeviceImageFormatInfo2& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceImageFormatInfo2& setFormat( Format format_ ) + { + format = format_; + return *this; + } + + PhysicalDeviceImageFormatInfo2& setType( ImageType type_ ) + { + type = type_; + return *this; + } + + PhysicalDeviceImageFormatInfo2& setTiling( ImageTiling tiling_ ) + { + tiling = tiling_; + return *this; + } + + PhysicalDeviceImageFormatInfo2& setUsage( ImageUsageFlags usage_ ) + { + usage = usage_; + return *this; + } + + PhysicalDeviceImageFormatInfo2& setFlags( ImageCreateFlags flags_ ) + { + flags = flags_; + return *this; + } + + operator VkPhysicalDeviceImageFormatInfo2 const&() const + { + return *reinterpret_cast(this); + } + + operator VkPhysicalDeviceImageFormatInfo2 &() + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDeviceImageFormatInfo2 const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( format == rhs.format ) + && ( type == rhs.type ) + && ( tiling == rhs.tiling ) + && ( usage == rhs.usage ) + && ( flags == rhs.flags ); + } + + bool operator!=( PhysicalDeviceImageFormatInfo2 const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePhysicalDeviceImageFormatInfo2; + + public: + const void* pNext = nullptr; + Format format; + ImageType type; + ImageTiling tiling; + ImageUsageFlags usage; + ImageCreateFlags flags; + }; + static_assert( sizeof( PhysicalDeviceImageFormatInfo2 ) == sizeof( VkPhysicalDeviceImageFormatInfo2 ), "struct and wrapper have different size!" 
); + + using PhysicalDeviceImageFormatInfo2KHR = PhysicalDeviceImageFormatInfo2; + + enum class PipelineCreateFlagBits + { + eDisableOptimization = VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT, + eAllowDerivatives = VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT, + eDerivative = VK_PIPELINE_CREATE_DERIVATIVE_BIT, + eViewIndexFromDeviceIndex = VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT, + eViewIndexFromDeviceIndexKHR = VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT, + eDispatchBase = VK_PIPELINE_CREATE_DISPATCH_BASE, + eDispatchBaseKHR = VK_PIPELINE_CREATE_DISPATCH_BASE, + eDeferCompileNV = VK_PIPELINE_CREATE_DEFER_COMPILE_BIT_NV + }; + + using PipelineCreateFlags = Flags; + + VULKAN_HPP_INLINE PipelineCreateFlags operator|( PipelineCreateFlagBits bit0, PipelineCreateFlagBits bit1 ) + { + return PipelineCreateFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE PipelineCreateFlags operator~( PipelineCreateFlagBits bits ) + { + return ~( PipelineCreateFlags( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(PipelineCreateFlagBits::eDisableOptimization) | VkFlags(PipelineCreateFlagBits::eAllowDerivatives) | VkFlags(PipelineCreateFlagBits::eDerivative) | VkFlags(PipelineCreateFlagBits::eViewIndexFromDeviceIndex) | VkFlags(PipelineCreateFlagBits::eDispatchBase) | VkFlags(PipelineCreateFlagBits::eDeferCompileNV) + }; + }; + + struct ComputePipelineCreateInfo + { + ComputePipelineCreateInfo( PipelineCreateFlags flags_ = PipelineCreateFlags(), + PipelineShaderStageCreateInfo stage_ = PipelineShaderStageCreateInfo(), + PipelineLayout layout_ = PipelineLayout(), + Pipeline basePipelineHandle_ = Pipeline(), + int32_t basePipelineIndex_ = 0 ) + : flags( flags_ ) + , stage( stage_ ) + , layout( layout_ ) + , basePipelineHandle( basePipelineHandle_ ) + , basePipelineIndex( basePipelineIndex_ ) + { + } + + ComputePipelineCreateInfo( VkComputePipelineCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( ComputePipelineCreateInfo ) ); + } + + ComputePipelineCreateInfo& operator=( VkComputePipelineCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( ComputePipelineCreateInfo ) ); + return *this; + } + ComputePipelineCreateInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + ComputePipelineCreateInfo& setFlags( PipelineCreateFlags flags_ ) + { + flags = flags_; + return *this; + } + + ComputePipelineCreateInfo& setStage( PipelineShaderStageCreateInfo stage_ ) + { + stage = stage_; + return *this; + } + + ComputePipelineCreateInfo& setLayout( PipelineLayout layout_ ) + { + layout = layout_; + return *this; + } + + ComputePipelineCreateInfo& setBasePipelineHandle( Pipeline basePipelineHandle_ ) + { + basePipelineHandle = basePipelineHandle_; + return *this; + } + + ComputePipelineCreateInfo& setBasePipelineIndex( int32_t basePipelineIndex_ ) + { + basePipelineIndex = basePipelineIndex_; + return *this; + } + + operator VkComputePipelineCreateInfo const&() const + { + return *reinterpret_cast(this); + } + + operator VkComputePipelineCreateInfo &() + { + return *reinterpret_cast(this); + } + + bool operator==( ComputePipelineCreateInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( stage == rhs.stage ) + && ( layout == rhs.layout ) + && ( basePipelineHandle == rhs.basePipelineHandle ) + && ( basePipelineIndex == rhs.basePipelineIndex ); + } + + bool operator!=( ComputePipelineCreateInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + 
StructureType sType = StructureType::eComputePipelineCreateInfo; + + public: + const void* pNext = nullptr; + PipelineCreateFlags flags; + PipelineShaderStageCreateInfo stage; + PipelineLayout layout; + Pipeline basePipelineHandle; + int32_t basePipelineIndex; + }; + static_assert( sizeof( ComputePipelineCreateInfo ) == sizeof( VkComputePipelineCreateInfo ), "struct and wrapper have different size!" ); + + enum class ColorComponentFlagBits + { + eR = VK_COLOR_COMPONENT_R_BIT, + eG = VK_COLOR_COMPONENT_G_BIT, + eB = VK_COLOR_COMPONENT_B_BIT, + eA = VK_COLOR_COMPONENT_A_BIT + }; + + using ColorComponentFlags = Flags; + + VULKAN_HPP_INLINE ColorComponentFlags operator|( ColorComponentFlagBits bit0, ColorComponentFlagBits bit1 ) + { + return ColorComponentFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE ColorComponentFlags operator~( ColorComponentFlagBits bits ) + { + return ~( ColorComponentFlags( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(ColorComponentFlagBits::eR) | VkFlags(ColorComponentFlagBits::eG) | VkFlags(ColorComponentFlagBits::eB) | VkFlags(ColorComponentFlagBits::eA) + }; + }; + + struct PipelineColorBlendAttachmentState + { + PipelineColorBlendAttachmentState( Bool32 blendEnable_ = 0, + BlendFactor srcColorBlendFactor_ = BlendFactor::eZero, + BlendFactor dstColorBlendFactor_ = BlendFactor::eZero, + BlendOp colorBlendOp_ = BlendOp::eAdd, + BlendFactor srcAlphaBlendFactor_ = BlendFactor::eZero, + BlendFactor dstAlphaBlendFactor_ = BlendFactor::eZero, + BlendOp alphaBlendOp_ = BlendOp::eAdd, + ColorComponentFlags colorWriteMask_ = ColorComponentFlags() ) + : blendEnable( blendEnable_ ) + , srcColorBlendFactor( srcColorBlendFactor_ ) + , dstColorBlendFactor( dstColorBlendFactor_ ) + , colorBlendOp( colorBlendOp_ ) + , srcAlphaBlendFactor( srcAlphaBlendFactor_ ) + , dstAlphaBlendFactor( dstAlphaBlendFactor_ ) + , alphaBlendOp( alphaBlendOp_ ) + , colorWriteMask( colorWriteMask_ ) + { + } + + PipelineColorBlendAttachmentState( VkPipelineColorBlendAttachmentState const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineColorBlendAttachmentState ) ); + } + + PipelineColorBlendAttachmentState& operator=( VkPipelineColorBlendAttachmentState const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineColorBlendAttachmentState ) ); + return *this; + } + PipelineColorBlendAttachmentState& setBlendEnable( Bool32 blendEnable_ ) + { + blendEnable = blendEnable_; + return *this; + } + + PipelineColorBlendAttachmentState& setSrcColorBlendFactor( BlendFactor srcColorBlendFactor_ ) + { + srcColorBlendFactor = srcColorBlendFactor_; + return *this; + } + + PipelineColorBlendAttachmentState& setDstColorBlendFactor( BlendFactor dstColorBlendFactor_ ) + { + dstColorBlendFactor = dstColorBlendFactor_; + return *this; + } + + PipelineColorBlendAttachmentState& setColorBlendOp( BlendOp colorBlendOp_ ) + { + colorBlendOp = colorBlendOp_; + return *this; + } + + PipelineColorBlendAttachmentState& setSrcAlphaBlendFactor( BlendFactor srcAlphaBlendFactor_ ) + { + srcAlphaBlendFactor = srcAlphaBlendFactor_; + return *this; + } + + PipelineColorBlendAttachmentState& setDstAlphaBlendFactor( BlendFactor dstAlphaBlendFactor_ ) + { + dstAlphaBlendFactor = dstAlphaBlendFactor_; + return *this; + } + + PipelineColorBlendAttachmentState& setAlphaBlendOp( BlendOp alphaBlendOp_ ) + { + alphaBlendOp = alphaBlendOp_; + return *this; + } + + PipelineColorBlendAttachmentState& setColorWriteMask( ColorComponentFlags colorWriteMask_ ) + { + colorWriteMask = colorWriteMask_; + return *this; + } 
+ + operator VkPipelineColorBlendAttachmentState const&() const + { + return *reinterpret_cast(this); + } + + operator VkPipelineColorBlendAttachmentState &() + { + return *reinterpret_cast(this); + } + + bool operator==( PipelineColorBlendAttachmentState const& rhs ) const + { + return ( blendEnable == rhs.blendEnable ) + && ( srcColorBlendFactor == rhs.srcColorBlendFactor ) + && ( dstColorBlendFactor == rhs.dstColorBlendFactor ) + && ( colorBlendOp == rhs.colorBlendOp ) + && ( srcAlphaBlendFactor == rhs.srcAlphaBlendFactor ) + && ( dstAlphaBlendFactor == rhs.dstAlphaBlendFactor ) + && ( alphaBlendOp == rhs.alphaBlendOp ) + && ( colorWriteMask == rhs.colorWriteMask ); + } + + bool operator!=( PipelineColorBlendAttachmentState const& rhs ) const + { + return !operator==( rhs ); + } + + Bool32 blendEnable; + BlendFactor srcColorBlendFactor; + BlendFactor dstColorBlendFactor; + BlendOp colorBlendOp; + BlendFactor srcAlphaBlendFactor; + BlendFactor dstAlphaBlendFactor; + BlendOp alphaBlendOp; + ColorComponentFlags colorWriteMask; + }; + static_assert( sizeof( PipelineColorBlendAttachmentState ) == sizeof( VkPipelineColorBlendAttachmentState ), "struct and wrapper have different size!" ); + + struct PipelineColorBlendStateCreateInfo + { + PipelineColorBlendStateCreateInfo( PipelineColorBlendStateCreateFlags flags_ = PipelineColorBlendStateCreateFlags(), + Bool32 logicOpEnable_ = 0, + LogicOp logicOp_ = LogicOp::eClear, + uint32_t attachmentCount_ = 0, + const PipelineColorBlendAttachmentState* pAttachments_ = nullptr, + std::array const& blendConstants_ = { { 0, 0, 0, 0 } } ) + : flags( flags_ ) + , logicOpEnable( logicOpEnable_ ) + , logicOp( logicOp_ ) + , attachmentCount( attachmentCount_ ) + , pAttachments( pAttachments_ ) + { + memcpy( &blendConstants, blendConstants_.data(), 4 * sizeof( float ) ); + } + + PipelineColorBlendStateCreateInfo( VkPipelineColorBlendStateCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineColorBlendStateCreateInfo ) ); + } + + PipelineColorBlendStateCreateInfo& operator=( VkPipelineColorBlendStateCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineColorBlendStateCreateInfo ) ); + return *this; + } + PipelineColorBlendStateCreateInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PipelineColorBlendStateCreateInfo& setFlags( PipelineColorBlendStateCreateFlags flags_ ) + { + flags = flags_; + return *this; + } + + PipelineColorBlendStateCreateInfo& setLogicOpEnable( Bool32 logicOpEnable_ ) + { + logicOpEnable = logicOpEnable_; + return *this; + } + + PipelineColorBlendStateCreateInfo& setLogicOp( LogicOp logicOp_ ) + { + logicOp = logicOp_; + return *this; + } + + PipelineColorBlendStateCreateInfo& setAttachmentCount( uint32_t attachmentCount_ ) + { + attachmentCount = attachmentCount_; + return *this; + } + + PipelineColorBlendStateCreateInfo& setPAttachments( const PipelineColorBlendAttachmentState* pAttachments_ ) + { + pAttachments = pAttachments_; + return *this; + } + + PipelineColorBlendStateCreateInfo& setBlendConstants( std::array blendConstants_ ) + { + memcpy( &blendConstants, blendConstants_.data(), 4 * sizeof( float ) ); + return *this; + } + + operator VkPipelineColorBlendStateCreateInfo const&() const + { + return *reinterpret_cast(this); + } + + operator VkPipelineColorBlendStateCreateInfo &() + { + return *reinterpret_cast(this); + } + + bool operator==( PipelineColorBlendStateCreateInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == 
rhs.flags )
+          && ( logicOpEnable == rhs.logicOpEnable )
+          && ( logicOp == rhs.logicOp )
+          && ( attachmentCount == rhs.attachmentCount )
+          && ( pAttachments == rhs.pAttachments )
+          && ( memcmp( blendConstants, rhs.blendConstants, 4 * sizeof( float ) ) == 0 );
+    }
+
+    bool operator!=( PipelineColorBlendStateCreateInfo const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType = StructureType::ePipelineColorBlendStateCreateInfo;
+
+  public:
+    const void* pNext = nullptr;
+    PipelineColorBlendStateCreateFlags flags;
+    Bool32 logicOpEnable;
+    LogicOp logicOp;
+    uint32_t attachmentCount;
+    const PipelineColorBlendAttachmentState* pAttachments;
+    float blendConstants[4];
+  };
+  static_assert( sizeof( PipelineColorBlendStateCreateInfo ) == sizeof( VkPipelineColorBlendStateCreateInfo ), "struct and wrapper have different size!" );
+
+  enum class FenceCreateFlagBits
+  {
+    eSignaled = VK_FENCE_CREATE_SIGNALED_BIT
+  };
+
+  using FenceCreateFlags = Flags<FenceCreateFlagBits, VkFenceCreateFlags>;
+
+  VULKAN_HPP_INLINE FenceCreateFlags operator|( FenceCreateFlagBits bit0, FenceCreateFlagBits bit1 )
+  {
+    return FenceCreateFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE FenceCreateFlags operator~( FenceCreateFlagBits bits )
+  {
+    return ~( FenceCreateFlags( bits ) );
+  }
+
+  template <> struct FlagTraits<FenceCreateFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(FenceCreateFlagBits::eSignaled)
+    };
+  };
+
+  struct FenceCreateInfo
+  {
+    FenceCreateInfo( FenceCreateFlags flags_ = FenceCreateFlags() )
+      : flags( flags_ )
+    {
+    }
+
+    FenceCreateInfo( VkFenceCreateInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( FenceCreateInfo ) );
+    }
+
+    FenceCreateInfo& operator=( VkFenceCreateInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( FenceCreateInfo ) );
+      return *this;
+    }
+    FenceCreateInfo& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    FenceCreateInfo& setFlags( FenceCreateFlags flags_ )
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    operator VkFenceCreateInfo const&() const
+    {
+      return *reinterpret_cast<const VkFenceCreateInfo*>(this);
+    }
+
+    operator VkFenceCreateInfo &()
+    {
+      return *reinterpret_cast<VkFenceCreateInfo*>(this);
+    }
+
+    bool operator==( FenceCreateInfo const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags );
+    }
+
+    bool operator!=( FenceCreateInfo const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType = StructureType::eFenceCreateInfo;
+
+  public:
+    const void* pNext = nullptr;
+    FenceCreateFlags flags;
+  };
+  static_assert( sizeof( FenceCreateInfo ) == sizeof( VkFenceCreateInfo ), "struct and wrapper have different size!"
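+
+  // Editorial usage sketch (not part of the original header): creating a fence
+  // that starts out signaled, e.g. for frame-pacing loops that wait before the
+  // first submit. Assumes a valid vk::Device 'device' and the default
+  // exception-based result handling.
+  //
+  //   vk::FenceCreateInfo fenceInfo( vk::FenceCreateFlagBits::eSignaled );
+  //   vk::Fence inFlightFence = device.createFence( fenceInfo );
+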
); + + enum class FormatFeatureFlagBits + { + eSampledImage = VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT, + eStorageImage = VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT, + eStorageImageAtomic = VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT, + eUniformTexelBuffer = VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT, + eStorageTexelBuffer = VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT, + eStorageTexelBufferAtomic = VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT, + eVertexBuffer = VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT, + eColorAttachment = VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT, + eColorAttachmentBlend = VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT, + eDepthStencilAttachment = VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT, + eBlitSrc = VK_FORMAT_FEATURE_BLIT_SRC_BIT, + eBlitDst = VK_FORMAT_FEATURE_BLIT_DST_BIT, + eSampledImageFilterLinear = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT, + eTransferSrc = VK_FORMAT_FEATURE_TRANSFER_SRC_BIT, + eTransferSrcKHR = VK_FORMAT_FEATURE_TRANSFER_SRC_BIT, + eTransferDst = VK_FORMAT_FEATURE_TRANSFER_DST_BIT, + eTransferDstKHR = VK_FORMAT_FEATURE_TRANSFER_DST_BIT, + eMidpointChromaSamples = VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT, + eMidpointChromaSamplesKHR = VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT, + eSampledImageYcbcrConversionLinearFilter = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT, + eSampledImageYcbcrConversionLinearFilterKHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT, + eSampledImageYcbcrConversionSeparateReconstructionFilter = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT, + eSampledImageYcbcrConversionSeparateReconstructionFilterKHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT, + eSampledImageYcbcrConversionChromaReconstructionExplicit = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT, + eSampledImageYcbcrConversionChromaReconstructionExplicitKHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT, + eSampledImageYcbcrConversionChromaReconstructionExplicitForceable = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT, + eSampledImageYcbcrConversionChromaReconstructionExplicitForceableKHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT, + eDisjoint = VK_FORMAT_FEATURE_DISJOINT_BIT, + eDisjointKHR = VK_FORMAT_FEATURE_DISJOINT_BIT, + eCositedChromaSamples = VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT, + eCositedChromaSamplesKHR = VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT, + eSampledImageFilterCubicIMG = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG, + eSampledImageFilterMinmaxEXT = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT_EXT + }; + + using FormatFeatureFlags = Flags; + + VULKAN_HPP_INLINE FormatFeatureFlags operator|( FormatFeatureFlagBits bit0, FormatFeatureFlagBits bit1 ) + { + return FormatFeatureFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE FormatFeatureFlags operator~( FormatFeatureFlagBits bits ) + { + return ~( FormatFeatureFlags( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(FormatFeatureFlagBits::eSampledImage) | VkFlags(FormatFeatureFlagBits::eStorageImage) | VkFlags(FormatFeatureFlagBits::eStorageImageAtomic) | VkFlags(FormatFeatureFlagBits::eUniformTexelBuffer) | VkFlags(FormatFeatureFlagBits::eStorageTexelBuffer) | VkFlags(FormatFeatureFlagBits::eStorageTexelBufferAtomic) | 
VkFlags(FormatFeatureFlagBits::eVertexBuffer) | VkFlags(FormatFeatureFlagBits::eColorAttachment) | VkFlags(FormatFeatureFlagBits::eColorAttachmentBlend) | VkFlags(FormatFeatureFlagBits::eDepthStencilAttachment) | VkFlags(FormatFeatureFlagBits::eBlitSrc) | VkFlags(FormatFeatureFlagBits::eBlitDst) | VkFlags(FormatFeatureFlagBits::eSampledImageFilterLinear) | VkFlags(FormatFeatureFlagBits::eTransferSrc) | VkFlags(FormatFeatureFlagBits::eTransferDst) | VkFlags(FormatFeatureFlagBits::eMidpointChromaSamples) | VkFlags(FormatFeatureFlagBits::eSampledImageYcbcrConversionLinearFilter) | VkFlags(FormatFeatureFlagBits::eSampledImageYcbcrConversionSeparateReconstructionFilter) | VkFlags(FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicit) | VkFlags(FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable) | VkFlags(FormatFeatureFlagBits::eDisjoint) | VkFlags(FormatFeatureFlagBits::eCositedChromaSamples) | VkFlags(FormatFeatureFlagBits::eSampledImageFilterCubicIMG) | VkFlags(FormatFeatureFlagBits::eSampledImageFilterMinmaxEXT) + }; + }; + + struct FormatProperties + { + operator VkFormatProperties const&() const + { + return *reinterpret_cast(this); + } + + operator VkFormatProperties &() + { + return *reinterpret_cast(this); + } + + bool operator==( FormatProperties const& rhs ) const + { + return ( linearTilingFeatures == rhs.linearTilingFeatures ) + && ( optimalTilingFeatures == rhs.optimalTilingFeatures ) + && ( bufferFeatures == rhs.bufferFeatures ); + } + + bool operator!=( FormatProperties const& rhs ) const + { + return !operator==( rhs ); + } + + FormatFeatureFlags linearTilingFeatures; + FormatFeatureFlags optimalTilingFeatures; + FormatFeatureFlags bufferFeatures; + }; + static_assert( sizeof( FormatProperties ) == sizeof( VkFormatProperties ), "struct and wrapper have different size!" ); + + struct FormatProperties2 + { + operator VkFormatProperties2 const&() const + { + return *reinterpret_cast(this); + } + + operator VkFormatProperties2 &() + { + return *reinterpret_cast(this); + } + + bool operator==( FormatProperties2 const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( formatProperties == rhs.formatProperties ); + } + + bool operator!=( FormatProperties2 const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eFormatProperties2; + + public: + void* pNext = nullptr; + FormatProperties formatProperties; + }; + static_assert( sizeof( FormatProperties2 ) == sizeof( VkFormatProperties2 ), "struct and wrapper have different size!" 
); + + using FormatProperties2KHR = FormatProperties2; + + struct DrmFormatModifierPropertiesEXT + { + operator VkDrmFormatModifierPropertiesEXT const&() const + { + return *reinterpret_cast(this); + } + + operator VkDrmFormatModifierPropertiesEXT &() + { + return *reinterpret_cast(this); + } + + bool operator==( DrmFormatModifierPropertiesEXT const& rhs ) const + { + return ( drmFormatModifier == rhs.drmFormatModifier ) + && ( drmFormatModifierPlaneCount == rhs.drmFormatModifierPlaneCount ) + && ( drmFormatModifierTilingFeatures == rhs.drmFormatModifierTilingFeatures ); + } + + bool operator!=( DrmFormatModifierPropertiesEXT const& rhs ) const + { + return !operator==( rhs ); + } + + uint64_t drmFormatModifier; + uint32_t drmFormatModifierPlaneCount; + FormatFeatureFlags drmFormatModifierTilingFeatures; + }; + static_assert( sizeof( DrmFormatModifierPropertiesEXT ) == sizeof( VkDrmFormatModifierPropertiesEXT ), "struct and wrapper have different size!" ); + + struct DrmFormatModifierPropertiesListEXT + { + DrmFormatModifierPropertiesListEXT( uint32_t drmFormatModifierCount_ = 0, + DrmFormatModifierPropertiesEXT* pDrmFormatModifierProperties_ = nullptr ) + : drmFormatModifierCount( drmFormatModifierCount_ ) + , pDrmFormatModifierProperties( pDrmFormatModifierProperties_ ) + { + } + + DrmFormatModifierPropertiesListEXT( VkDrmFormatModifierPropertiesListEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( DrmFormatModifierPropertiesListEXT ) ); + } + + DrmFormatModifierPropertiesListEXT& operator=( VkDrmFormatModifierPropertiesListEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( DrmFormatModifierPropertiesListEXT ) ); + return *this; + } + DrmFormatModifierPropertiesListEXT& setPNext( void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + DrmFormatModifierPropertiesListEXT& setDrmFormatModifierCount( uint32_t drmFormatModifierCount_ ) + { + drmFormatModifierCount = drmFormatModifierCount_; + return *this; + } + + DrmFormatModifierPropertiesListEXT& setPDrmFormatModifierProperties( DrmFormatModifierPropertiesEXT* pDrmFormatModifierProperties_ ) + { + pDrmFormatModifierProperties = pDrmFormatModifierProperties_; + return *this; + } + + operator VkDrmFormatModifierPropertiesListEXT const&() const + { + return *reinterpret_cast(this); + } + + operator VkDrmFormatModifierPropertiesListEXT &() + { + return *reinterpret_cast(this); + } + + bool operator==( DrmFormatModifierPropertiesListEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( drmFormatModifierCount == rhs.drmFormatModifierCount ) + && ( pDrmFormatModifierProperties == rhs.pDrmFormatModifierProperties ); + } + + bool operator!=( DrmFormatModifierPropertiesListEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eDrmFormatModifierPropertiesListEXT; + + public: + void* pNext = nullptr; + uint32_t drmFormatModifierCount; + DrmFormatModifierPropertiesEXT* pDrmFormatModifierProperties; + }; + static_assert( sizeof( DrmFormatModifierPropertiesListEXT ) == sizeof( VkDrmFormatModifierPropertiesListEXT ), "struct and wrapper have different size!" 
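+
+  // Editorial usage sketch (not part of the original header): this is a pure
+  // output structure; it is chained into FormatProperties2::pNext and filled by
+  // getFormatProperties2 using the usual two-call enumeration pattern. Assumes a
+  // valid vk::PhysicalDevice 'physicalDevice', <vector>, and support for
+  // VK_EXT_image_drm_format_modifier.
+  //
+  //   vk::DrmFormatModifierPropertiesListEXT modifierList;   // first call: count only
+  //   vk::FormatProperties2 formatProps2;
+  //   formatProps2.pNext = &modifierList;
+  //   physicalDevice.getFormatProperties2( vk::Format::eB8G8R8A8Unorm, &formatProps2 );
+  //   std::vector<vk::DrmFormatModifierPropertiesEXT> modifiers( modifierList.drmFormatModifierCount );
+  //   modifierList.pDrmFormatModifierProperties = modifiers.data();
+  //   physicalDevice.getFormatProperties2( vk::Format::eB8G8R8A8Unorm, &formatProps2 );
+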
); + + enum class QueryControlFlagBits + { + ePrecise = VK_QUERY_CONTROL_PRECISE_BIT + }; + + using QueryControlFlags = Flags; + + VULKAN_HPP_INLINE QueryControlFlags operator|( QueryControlFlagBits bit0, QueryControlFlagBits bit1 ) + { + return QueryControlFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE QueryControlFlags operator~( QueryControlFlagBits bits ) + { + return ~( QueryControlFlags( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(QueryControlFlagBits::ePrecise) + }; + }; + + enum class QueryResultFlagBits + { + e64 = VK_QUERY_RESULT_64_BIT, + eWait = VK_QUERY_RESULT_WAIT_BIT, + eWithAvailability = VK_QUERY_RESULT_WITH_AVAILABILITY_BIT, + ePartial = VK_QUERY_RESULT_PARTIAL_BIT + }; + + using QueryResultFlags = Flags; + + VULKAN_HPP_INLINE QueryResultFlags operator|( QueryResultFlagBits bit0, QueryResultFlagBits bit1 ) + { + return QueryResultFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE QueryResultFlags operator~( QueryResultFlagBits bits ) + { + return ~( QueryResultFlags( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(QueryResultFlagBits::e64) | VkFlags(QueryResultFlagBits::eWait) | VkFlags(QueryResultFlagBits::eWithAvailability) | VkFlags(QueryResultFlagBits::ePartial) + }; + }; + + enum class CommandBufferUsageFlagBits + { + eOneTimeSubmit = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT, + eRenderPassContinue = VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT, + eSimultaneousUse = VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT + }; + + using CommandBufferUsageFlags = Flags; + + VULKAN_HPP_INLINE CommandBufferUsageFlags operator|( CommandBufferUsageFlagBits bit0, CommandBufferUsageFlagBits bit1 ) + { + return CommandBufferUsageFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE CommandBufferUsageFlags operator~( CommandBufferUsageFlagBits bits ) + { + return ~( CommandBufferUsageFlags( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(CommandBufferUsageFlagBits::eOneTimeSubmit) | VkFlags(CommandBufferUsageFlagBits::eRenderPassContinue) | VkFlags(CommandBufferUsageFlagBits::eSimultaneousUse) + }; + }; + + enum class QueryPipelineStatisticFlagBits + { + eInputAssemblyVertices = VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_VERTICES_BIT, + eInputAssemblyPrimitives = VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_PRIMITIVES_BIT, + eVertexShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_VERTEX_SHADER_INVOCATIONS_BIT, + eGeometryShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_INVOCATIONS_BIT, + eGeometryShaderPrimitives = VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_PRIMITIVES_BIT, + eClippingInvocations = VK_QUERY_PIPELINE_STATISTIC_CLIPPING_INVOCATIONS_BIT, + eClippingPrimitives = VK_QUERY_PIPELINE_STATISTIC_CLIPPING_PRIMITIVES_BIT, + eFragmentShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_FRAGMENT_SHADER_INVOCATIONS_BIT, + eTessellationControlShaderPatches = VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_CONTROL_SHADER_PATCHES_BIT, + eTessellationEvaluationShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_EVALUATION_SHADER_INVOCATIONS_BIT, + eComputeShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_COMPUTE_SHADER_INVOCATIONS_BIT + }; + + using QueryPipelineStatisticFlags = Flags; + + VULKAN_HPP_INLINE QueryPipelineStatisticFlags operator|( QueryPipelineStatisticFlagBits bit0, QueryPipelineStatisticFlagBits bit1 ) + { + return QueryPipelineStatisticFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE QueryPipelineStatisticFlags operator~( QueryPipelineStatisticFlagBits bits 
) + { + return ~( QueryPipelineStatisticFlags( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(QueryPipelineStatisticFlagBits::eInputAssemblyVertices) | VkFlags(QueryPipelineStatisticFlagBits::eInputAssemblyPrimitives) | VkFlags(QueryPipelineStatisticFlagBits::eVertexShaderInvocations) | VkFlags(QueryPipelineStatisticFlagBits::eGeometryShaderInvocations) | VkFlags(QueryPipelineStatisticFlagBits::eGeometryShaderPrimitives) | VkFlags(QueryPipelineStatisticFlagBits::eClippingInvocations) | VkFlags(QueryPipelineStatisticFlagBits::eClippingPrimitives) | VkFlags(QueryPipelineStatisticFlagBits::eFragmentShaderInvocations) | VkFlags(QueryPipelineStatisticFlagBits::eTessellationControlShaderPatches) | VkFlags(QueryPipelineStatisticFlagBits::eTessellationEvaluationShaderInvocations) | VkFlags(QueryPipelineStatisticFlagBits::eComputeShaderInvocations) + }; + }; + + struct CommandBufferInheritanceInfo + { + CommandBufferInheritanceInfo( RenderPass renderPass_ = RenderPass(), + uint32_t subpass_ = 0, + Framebuffer framebuffer_ = Framebuffer(), + Bool32 occlusionQueryEnable_ = 0, + QueryControlFlags queryFlags_ = QueryControlFlags(), + QueryPipelineStatisticFlags pipelineStatistics_ = QueryPipelineStatisticFlags() ) + : renderPass( renderPass_ ) + , subpass( subpass_ ) + , framebuffer( framebuffer_ ) + , occlusionQueryEnable( occlusionQueryEnable_ ) + , queryFlags( queryFlags_ ) + , pipelineStatistics( pipelineStatistics_ ) + { + } + + CommandBufferInheritanceInfo( VkCommandBufferInheritanceInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( CommandBufferInheritanceInfo ) ); + } + + CommandBufferInheritanceInfo& operator=( VkCommandBufferInheritanceInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( CommandBufferInheritanceInfo ) ); + return *this; + } + CommandBufferInheritanceInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + CommandBufferInheritanceInfo& setRenderPass( RenderPass renderPass_ ) + { + renderPass = renderPass_; + return *this; + } + + CommandBufferInheritanceInfo& setSubpass( uint32_t subpass_ ) + { + subpass = subpass_; + return *this; + } + + CommandBufferInheritanceInfo& setFramebuffer( Framebuffer framebuffer_ ) + { + framebuffer = framebuffer_; + return *this; + } + + CommandBufferInheritanceInfo& setOcclusionQueryEnable( Bool32 occlusionQueryEnable_ ) + { + occlusionQueryEnable = occlusionQueryEnable_; + return *this; + } + + CommandBufferInheritanceInfo& setQueryFlags( QueryControlFlags queryFlags_ ) + { + queryFlags = queryFlags_; + return *this; + } + + CommandBufferInheritanceInfo& setPipelineStatistics( QueryPipelineStatisticFlags pipelineStatistics_ ) + { + pipelineStatistics = pipelineStatistics_; + return *this; + } + + operator VkCommandBufferInheritanceInfo const&() const + { + return *reinterpret_cast(this); + } + + operator VkCommandBufferInheritanceInfo &() + { + return *reinterpret_cast(this); + } + + bool operator==( CommandBufferInheritanceInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( renderPass == rhs.renderPass ) + && ( subpass == rhs.subpass ) + && ( framebuffer == rhs.framebuffer ) + && ( occlusionQueryEnable == rhs.occlusionQueryEnable ) + && ( queryFlags == rhs.queryFlags ) + && ( pipelineStatistics == rhs.pipelineStatistics ); + } + + bool operator!=( CommandBufferInheritanceInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eCommandBufferInheritanceInfo; + + public: + const void* 
pNext = nullptr; + RenderPass renderPass; + uint32_t subpass; + Framebuffer framebuffer; + Bool32 occlusionQueryEnable; + QueryControlFlags queryFlags; + QueryPipelineStatisticFlags pipelineStatistics; + }; + static_assert( sizeof( CommandBufferInheritanceInfo ) == sizeof( VkCommandBufferInheritanceInfo ), "struct and wrapper have different size!" ); + + struct CommandBufferBeginInfo + { + CommandBufferBeginInfo( CommandBufferUsageFlags flags_ = CommandBufferUsageFlags(), + const CommandBufferInheritanceInfo* pInheritanceInfo_ = nullptr ) + : flags( flags_ ) + , pInheritanceInfo( pInheritanceInfo_ ) + { + } + + CommandBufferBeginInfo( VkCommandBufferBeginInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( CommandBufferBeginInfo ) ); + } + + CommandBufferBeginInfo& operator=( VkCommandBufferBeginInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( CommandBufferBeginInfo ) ); + return *this; + } + CommandBufferBeginInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + CommandBufferBeginInfo& setFlags( CommandBufferUsageFlags flags_ ) + { + flags = flags_; + return *this; + } + + CommandBufferBeginInfo& setPInheritanceInfo( const CommandBufferInheritanceInfo* pInheritanceInfo_ ) + { + pInheritanceInfo = pInheritanceInfo_; + return *this; + } + + operator VkCommandBufferBeginInfo const&() const + { + return *reinterpret_cast(this); + } + + operator VkCommandBufferBeginInfo &() + { + return *reinterpret_cast(this); + } + + bool operator==( CommandBufferBeginInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( pInheritanceInfo == rhs.pInheritanceInfo ); + } + + bool operator!=( CommandBufferBeginInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eCommandBufferBeginInfo; + + public: + const void* pNext = nullptr; + CommandBufferUsageFlags flags; + const CommandBufferInheritanceInfo* pInheritanceInfo; + }; + static_assert( sizeof( CommandBufferBeginInfo ) == sizeof( VkCommandBufferBeginInfo ), "struct and wrapper have different size!" 
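+
+  // Editorial usage sketch (not part of the original header): recording a
+  // primary command buffer for a single submission. Assumes a valid
+  // vk::CommandBuffer 'commandBuffer' allocated from a command pool.
+  //
+  //   vk::CommandBufferBeginInfo beginInfo( vk::CommandBufferUsageFlagBits::eOneTimeSubmit );
+  //   commandBuffer.begin( beginInfo );
+  //   // ... record commands ...
+  //   commandBuffer.end();
+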
); + + struct QueryPoolCreateInfo + { + QueryPoolCreateInfo( QueryPoolCreateFlags flags_ = QueryPoolCreateFlags(), + QueryType queryType_ = QueryType::eOcclusion, + uint32_t queryCount_ = 0, + QueryPipelineStatisticFlags pipelineStatistics_ = QueryPipelineStatisticFlags() ) + : flags( flags_ ) + , queryType( queryType_ ) + , queryCount( queryCount_ ) + , pipelineStatistics( pipelineStatistics_ ) + { + } + + QueryPoolCreateInfo( VkQueryPoolCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( QueryPoolCreateInfo ) ); + } + + QueryPoolCreateInfo& operator=( VkQueryPoolCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( QueryPoolCreateInfo ) ); + return *this; + } + QueryPoolCreateInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + QueryPoolCreateInfo& setFlags( QueryPoolCreateFlags flags_ ) + { + flags = flags_; + return *this; + } + + QueryPoolCreateInfo& setQueryType( QueryType queryType_ ) + { + queryType = queryType_; + return *this; + } + + QueryPoolCreateInfo& setQueryCount( uint32_t queryCount_ ) + { + queryCount = queryCount_; + return *this; + } + + QueryPoolCreateInfo& setPipelineStatistics( QueryPipelineStatisticFlags pipelineStatistics_ ) + { + pipelineStatistics = pipelineStatistics_; + return *this; + } + + operator VkQueryPoolCreateInfo const&() const + { + return *reinterpret_cast(this); + } + + operator VkQueryPoolCreateInfo &() + { + return *reinterpret_cast(this); + } + + bool operator==( QueryPoolCreateInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( queryType == rhs.queryType ) + && ( queryCount == rhs.queryCount ) + && ( pipelineStatistics == rhs.pipelineStatistics ); + } + + bool operator!=( QueryPoolCreateInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eQueryPoolCreateInfo; + + public: + const void* pNext = nullptr; + QueryPoolCreateFlags flags; + QueryType queryType; + uint32_t queryCount; + QueryPipelineStatisticFlags pipelineStatistics; + }; + static_assert( sizeof( QueryPoolCreateInfo ) == sizeof( VkQueryPoolCreateInfo ), "struct and wrapper have different size!" 
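+
+  // Editorial usage sketch (not part of the original header): an occlusion query
+  // pool with room for 16 queries; pipelineStatistics is only consulted when
+  // queryType is QueryType::ePipelineStatistics. Assumes a valid vk::Device 'device'.
+  //
+  //   vk::QueryPoolCreateInfo queryPoolInfo( {}, vk::QueryType::eOcclusion, 16 );
+  //   vk::QueryPool queryPool = device.createQueryPool( queryPoolInfo );
+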
); + + enum class ImageAspectFlagBits + { + eColor = VK_IMAGE_ASPECT_COLOR_BIT, + eDepth = VK_IMAGE_ASPECT_DEPTH_BIT, + eStencil = VK_IMAGE_ASPECT_STENCIL_BIT, + eMetadata = VK_IMAGE_ASPECT_METADATA_BIT, + ePlane0 = VK_IMAGE_ASPECT_PLANE_0_BIT, + ePlane0KHR = VK_IMAGE_ASPECT_PLANE_0_BIT, + ePlane1 = VK_IMAGE_ASPECT_PLANE_1_BIT, + ePlane1KHR = VK_IMAGE_ASPECT_PLANE_1_BIT, + ePlane2 = VK_IMAGE_ASPECT_PLANE_2_BIT, + ePlane2KHR = VK_IMAGE_ASPECT_PLANE_2_BIT, + eMemoryPlane0EXT = VK_IMAGE_ASPECT_MEMORY_PLANE_0_BIT_EXT, + eMemoryPlane1EXT = VK_IMAGE_ASPECT_MEMORY_PLANE_1_BIT_EXT, + eMemoryPlane2EXT = VK_IMAGE_ASPECT_MEMORY_PLANE_2_BIT_EXT, + eMemoryPlane3EXT = VK_IMAGE_ASPECT_MEMORY_PLANE_3_BIT_EXT + }; + + using ImageAspectFlags = Flags; + + VULKAN_HPP_INLINE ImageAspectFlags operator|( ImageAspectFlagBits bit0, ImageAspectFlagBits bit1 ) + { + return ImageAspectFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE ImageAspectFlags operator~( ImageAspectFlagBits bits ) + { + return ~( ImageAspectFlags( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(ImageAspectFlagBits::eColor) | VkFlags(ImageAspectFlagBits::eDepth) | VkFlags(ImageAspectFlagBits::eStencil) | VkFlags(ImageAspectFlagBits::eMetadata) | VkFlags(ImageAspectFlagBits::ePlane0) | VkFlags(ImageAspectFlagBits::ePlane1) | VkFlags(ImageAspectFlagBits::ePlane2) | VkFlags(ImageAspectFlagBits::eMemoryPlane0EXT) | VkFlags(ImageAspectFlagBits::eMemoryPlane1EXT) | VkFlags(ImageAspectFlagBits::eMemoryPlane2EXT) | VkFlags(ImageAspectFlagBits::eMemoryPlane3EXT) + }; + }; + + struct ImageSubresource + { + ImageSubresource( ImageAspectFlags aspectMask_ = ImageAspectFlags(), + uint32_t mipLevel_ = 0, + uint32_t arrayLayer_ = 0 ) + : aspectMask( aspectMask_ ) + , mipLevel( mipLevel_ ) + , arrayLayer( arrayLayer_ ) + { + } + + ImageSubresource( VkImageSubresource const & rhs ) + { + memcpy( this, &rhs, sizeof( ImageSubresource ) ); + } + + ImageSubresource& operator=( VkImageSubresource const & rhs ) + { + memcpy( this, &rhs, sizeof( ImageSubresource ) ); + return *this; + } + ImageSubresource& setAspectMask( ImageAspectFlags aspectMask_ ) + { + aspectMask = aspectMask_; + return *this; + } + + ImageSubresource& setMipLevel( uint32_t mipLevel_ ) + { + mipLevel = mipLevel_; + return *this; + } + + ImageSubresource& setArrayLayer( uint32_t arrayLayer_ ) + { + arrayLayer = arrayLayer_; + return *this; + } + + operator VkImageSubresource const&() const + { + return *reinterpret_cast(this); + } + + operator VkImageSubresource &() + { + return *reinterpret_cast(this); + } + + bool operator==( ImageSubresource const& rhs ) const + { + return ( aspectMask == rhs.aspectMask ) + && ( mipLevel == rhs.mipLevel ) + && ( arrayLayer == rhs.arrayLayer ); + } + + bool operator!=( ImageSubresource const& rhs ) const + { + return !operator==( rhs ); + } + + ImageAspectFlags aspectMask; + uint32_t mipLevel; + uint32_t arrayLayer; + }; + static_assert( sizeof( ImageSubresource ) == sizeof( VkImageSubresource ), "struct and wrapper have different size!" 
); + + struct ImageSubresourceLayers + { + ImageSubresourceLayers( ImageAspectFlags aspectMask_ = ImageAspectFlags(), + uint32_t mipLevel_ = 0, + uint32_t baseArrayLayer_ = 0, + uint32_t layerCount_ = 0 ) + : aspectMask( aspectMask_ ) + , mipLevel( mipLevel_ ) + , baseArrayLayer( baseArrayLayer_ ) + , layerCount( layerCount_ ) + { + } + + ImageSubresourceLayers( VkImageSubresourceLayers const & rhs ) + { + memcpy( this, &rhs, sizeof( ImageSubresourceLayers ) ); + } + + ImageSubresourceLayers& operator=( VkImageSubresourceLayers const & rhs ) + { + memcpy( this, &rhs, sizeof( ImageSubresourceLayers ) ); + return *this; + } + ImageSubresourceLayers& setAspectMask( ImageAspectFlags aspectMask_ ) + { + aspectMask = aspectMask_; + return *this; + } + + ImageSubresourceLayers& setMipLevel( uint32_t mipLevel_ ) + { + mipLevel = mipLevel_; + return *this; + } + + ImageSubresourceLayers& setBaseArrayLayer( uint32_t baseArrayLayer_ ) + { + baseArrayLayer = baseArrayLayer_; + return *this; + } + + ImageSubresourceLayers& setLayerCount( uint32_t layerCount_ ) + { + layerCount = layerCount_; + return *this; + } + + operator VkImageSubresourceLayers const&() const + { + return *reinterpret_cast(this); + } + + operator VkImageSubresourceLayers &() + { + return *reinterpret_cast(this); + } + + bool operator==( ImageSubresourceLayers const& rhs ) const + { + return ( aspectMask == rhs.aspectMask ) + && ( mipLevel == rhs.mipLevel ) + && ( baseArrayLayer == rhs.baseArrayLayer ) + && ( layerCount == rhs.layerCount ); + } + + bool operator!=( ImageSubresourceLayers const& rhs ) const + { + return !operator==( rhs ); + } + + ImageAspectFlags aspectMask; + uint32_t mipLevel; + uint32_t baseArrayLayer; + uint32_t layerCount; + }; + static_assert( sizeof( ImageSubresourceLayers ) == sizeof( VkImageSubresourceLayers ), "struct and wrapper have different size!" 
); + + struct ImageSubresourceRange + { + ImageSubresourceRange( ImageAspectFlags aspectMask_ = ImageAspectFlags(), + uint32_t baseMipLevel_ = 0, + uint32_t levelCount_ = 0, + uint32_t baseArrayLayer_ = 0, + uint32_t layerCount_ = 0 ) + : aspectMask( aspectMask_ ) + , baseMipLevel( baseMipLevel_ ) + , levelCount( levelCount_ ) + , baseArrayLayer( baseArrayLayer_ ) + , layerCount( layerCount_ ) + { + } + + ImageSubresourceRange( VkImageSubresourceRange const & rhs ) + { + memcpy( this, &rhs, sizeof( ImageSubresourceRange ) ); + } + + ImageSubresourceRange& operator=( VkImageSubresourceRange const & rhs ) + { + memcpy( this, &rhs, sizeof( ImageSubresourceRange ) ); + return *this; + } + ImageSubresourceRange& setAspectMask( ImageAspectFlags aspectMask_ ) + { + aspectMask = aspectMask_; + return *this; + } + + ImageSubresourceRange& setBaseMipLevel( uint32_t baseMipLevel_ ) + { + baseMipLevel = baseMipLevel_; + return *this; + } + + ImageSubresourceRange& setLevelCount( uint32_t levelCount_ ) + { + levelCount = levelCount_; + return *this; + } + + ImageSubresourceRange& setBaseArrayLayer( uint32_t baseArrayLayer_ ) + { + baseArrayLayer = baseArrayLayer_; + return *this; + } + + ImageSubresourceRange& setLayerCount( uint32_t layerCount_ ) + { + layerCount = layerCount_; + return *this; + } + + operator VkImageSubresourceRange const&() const + { + return *reinterpret_cast(this); + } + + operator VkImageSubresourceRange &() + { + return *reinterpret_cast(this); + } + + bool operator==( ImageSubresourceRange const& rhs ) const + { + return ( aspectMask == rhs.aspectMask ) + && ( baseMipLevel == rhs.baseMipLevel ) + && ( levelCount == rhs.levelCount ) + && ( baseArrayLayer == rhs.baseArrayLayer ) + && ( layerCount == rhs.layerCount ); + } + + bool operator!=( ImageSubresourceRange const& rhs ) const + { + return !operator==( rhs ); + } + + ImageAspectFlags aspectMask; + uint32_t baseMipLevel; + uint32_t levelCount; + uint32_t baseArrayLayer; + uint32_t layerCount; + }; + static_assert( sizeof( ImageSubresourceRange ) == sizeof( VkImageSubresourceRange ), "struct and wrapper have different size!" 
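+
+  // Editorial usage sketch (not part of the original header): a range covering
+  // every mip level and array layer of a color image, as commonly passed to
+  // image views and barriers.
+  //
+  //   vk::ImageSubresourceRange fullColorRange( vk::ImageAspectFlagBits::eColor,
+  //                                             0, VK_REMAINING_MIP_LEVELS,
+  //                                             0, VK_REMAINING_ARRAY_LAYERS );
+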
); + + struct ImageMemoryBarrier + { + ImageMemoryBarrier( AccessFlags srcAccessMask_ = AccessFlags(), + AccessFlags dstAccessMask_ = AccessFlags(), + ImageLayout oldLayout_ = ImageLayout::eUndefined, + ImageLayout newLayout_ = ImageLayout::eUndefined, + uint32_t srcQueueFamilyIndex_ = 0, + uint32_t dstQueueFamilyIndex_ = 0, + Image image_ = Image(), + ImageSubresourceRange subresourceRange_ = ImageSubresourceRange() ) + : srcAccessMask( srcAccessMask_ ) + , dstAccessMask( dstAccessMask_ ) + , oldLayout( oldLayout_ ) + , newLayout( newLayout_ ) + , srcQueueFamilyIndex( srcQueueFamilyIndex_ ) + , dstQueueFamilyIndex( dstQueueFamilyIndex_ ) + , image( image_ ) + , subresourceRange( subresourceRange_ ) + { + } + + ImageMemoryBarrier( VkImageMemoryBarrier const & rhs ) + { + memcpy( this, &rhs, sizeof( ImageMemoryBarrier ) ); + } + + ImageMemoryBarrier& operator=( VkImageMemoryBarrier const & rhs ) + { + memcpy( this, &rhs, sizeof( ImageMemoryBarrier ) ); + return *this; + } + ImageMemoryBarrier& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + ImageMemoryBarrier& setSrcAccessMask( AccessFlags srcAccessMask_ ) + { + srcAccessMask = srcAccessMask_; + return *this; + } + + ImageMemoryBarrier& setDstAccessMask( AccessFlags dstAccessMask_ ) + { + dstAccessMask = dstAccessMask_; + return *this; + } + + ImageMemoryBarrier& setOldLayout( ImageLayout oldLayout_ ) + { + oldLayout = oldLayout_; + return *this; + } + + ImageMemoryBarrier& setNewLayout( ImageLayout newLayout_ ) + { + newLayout = newLayout_; + return *this; + } + + ImageMemoryBarrier& setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ ) + { + srcQueueFamilyIndex = srcQueueFamilyIndex_; + return *this; + } + + ImageMemoryBarrier& setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ ) + { + dstQueueFamilyIndex = dstQueueFamilyIndex_; + return *this; + } + + ImageMemoryBarrier& setImage( Image image_ ) + { + image = image_; + return *this; + } + + ImageMemoryBarrier& setSubresourceRange( ImageSubresourceRange subresourceRange_ ) + { + subresourceRange = subresourceRange_; + return *this; + } + + operator VkImageMemoryBarrier const&() const + { + return *reinterpret_cast(this); + } + + operator VkImageMemoryBarrier &() + { + return *reinterpret_cast(this); + } + + bool operator==( ImageMemoryBarrier const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( srcAccessMask == rhs.srcAccessMask ) + && ( dstAccessMask == rhs.dstAccessMask ) + && ( oldLayout == rhs.oldLayout ) + && ( newLayout == rhs.newLayout ) + && ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex ) + && ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex ) + && ( image == rhs.image ) + && ( subresourceRange == rhs.subresourceRange ); + } + + bool operator!=( ImageMemoryBarrier const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eImageMemoryBarrier; + + public: + const void* pNext = nullptr; + AccessFlags srcAccessMask; + AccessFlags dstAccessMask; + ImageLayout oldLayout; + ImageLayout newLayout; + uint32_t srcQueueFamilyIndex; + uint32_t dstQueueFamilyIndex; + Image image; + ImageSubresourceRange subresourceRange; + }; + static_assert( sizeof( ImageMemoryBarrier ) == sizeof( VkImageMemoryBarrier ), "struct and wrapper have different size!" 
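+
+  // Editorial usage sketch (not part of the original header): transitioning a
+  // freshly created image into a copy-destination layout. Assumes a valid
+  // vk::CommandBuffer 'commandBuffer', a vk::Image 'image', and the
+  // 'fullColorRange' sketched above.
+  //
+  //   vk::ImageMemoryBarrier toTransferDst( {}, vk::AccessFlagBits::eTransferWrite,
+  //                                         vk::ImageLayout::eUndefined, vk::ImageLayout::eTransferDstOptimal,
+  //                                         VK_QUEUE_FAMILY_IGNORED, VK_QUEUE_FAMILY_IGNORED,
+  //                                         image, fullColorRange );
+  //   commandBuffer.pipelineBarrier( vk::PipelineStageFlagBits::eTopOfPipe, vk::PipelineStageFlagBits::eTransfer,
+  //                                  {}, nullptr, nullptr, toTransferDst );
+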
); + + struct ImageViewCreateInfo + { + ImageViewCreateInfo( ImageViewCreateFlags flags_ = ImageViewCreateFlags(), + Image image_ = Image(), + ImageViewType viewType_ = ImageViewType::e1D, + Format format_ = Format::eUndefined, + ComponentMapping components_ = ComponentMapping(), + ImageSubresourceRange subresourceRange_ = ImageSubresourceRange() ) + : flags( flags_ ) + , image( image_ ) + , viewType( viewType_ ) + , format( format_ ) + , components( components_ ) + , subresourceRange( subresourceRange_ ) + { + } + + ImageViewCreateInfo( VkImageViewCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( ImageViewCreateInfo ) ); + } + + ImageViewCreateInfo& operator=( VkImageViewCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( ImageViewCreateInfo ) ); + return *this; + } + ImageViewCreateInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + ImageViewCreateInfo& setFlags( ImageViewCreateFlags flags_ ) + { + flags = flags_; + return *this; + } + + ImageViewCreateInfo& setImage( Image image_ ) + { + image = image_; + return *this; + } + + ImageViewCreateInfo& setViewType( ImageViewType viewType_ ) + { + viewType = viewType_; + return *this; + } + + ImageViewCreateInfo& setFormat( Format format_ ) + { + format = format_; + return *this; + } + + ImageViewCreateInfo& setComponents( ComponentMapping components_ ) + { + components = components_; + return *this; + } + + ImageViewCreateInfo& setSubresourceRange( ImageSubresourceRange subresourceRange_ ) + { + subresourceRange = subresourceRange_; + return *this; + } + + operator VkImageViewCreateInfo const&() const + { + return *reinterpret_cast(this); + } + + operator VkImageViewCreateInfo &() + { + return *reinterpret_cast(this); + } + + bool operator==( ImageViewCreateInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( image == rhs.image ) + && ( viewType == rhs.viewType ) + && ( format == rhs.format ) + && ( components == rhs.components ) + && ( subresourceRange == rhs.subresourceRange ); + } + + bool operator!=( ImageViewCreateInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eImageViewCreateInfo; + + public: + const void* pNext = nullptr; + ImageViewCreateFlags flags; + Image image; + ImageViewType viewType; + Format format; + ComponentMapping components; + ImageSubresourceRange subresourceRange; + }; + static_assert( sizeof( ImageViewCreateInfo ) == sizeof( VkImageViewCreateInfo ), "struct and wrapper have different size!" 
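+
+  // Editorial usage sketch (not part of the original header): a plain 2D color
+  // view over 'image', reusing the subresource range sketched above. Assumes a
+  // valid vk::Device 'device' and a matching vk::Format 'imageFormat'.
+  //
+  //   vk::ImageViewCreateInfo viewInfo( {}, image, vk::ImageViewType::e2D, imageFormat,
+  //                                     vk::ComponentMapping(), fullColorRange );
+  //   vk::ImageView imageView = device.createImageView( viewInfo );
+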
); + + struct ImageCopy + { + ImageCopy( ImageSubresourceLayers srcSubresource_ = ImageSubresourceLayers(), + Offset3D srcOffset_ = Offset3D(), + ImageSubresourceLayers dstSubresource_ = ImageSubresourceLayers(), + Offset3D dstOffset_ = Offset3D(), + Extent3D extent_ = Extent3D() ) + : srcSubresource( srcSubresource_ ) + , srcOffset( srcOffset_ ) + , dstSubresource( dstSubresource_ ) + , dstOffset( dstOffset_ ) + , extent( extent_ ) + { + } + + ImageCopy( VkImageCopy const & rhs ) + { + memcpy( this, &rhs, sizeof( ImageCopy ) ); + } + + ImageCopy& operator=( VkImageCopy const & rhs ) + { + memcpy( this, &rhs, sizeof( ImageCopy ) ); + return *this; + } + ImageCopy& setSrcSubresource( ImageSubresourceLayers srcSubresource_ ) + { + srcSubresource = srcSubresource_; + return *this; + } + + ImageCopy& setSrcOffset( Offset3D srcOffset_ ) + { + srcOffset = srcOffset_; + return *this; + } + + ImageCopy& setDstSubresource( ImageSubresourceLayers dstSubresource_ ) + { + dstSubresource = dstSubresource_; + return *this; + } + + ImageCopy& setDstOffset( Offset3D dstOffset_ ) + { + dstOffset = dstOffset_; + return *this; + } + + ImageCopy& setExtent( Extent3D extent_ ) + { + extent = extent_; + return *this; + } + + operator VkImageCopy const&() const + { + return *reinterpret_cast(this); + } + + operator VkImageCopy &() + { + return *reinterpret_cast(this); + } + + bool operator==( ImageCopy const& rhs ) const + { + return ( srcSubresource == rhs.srcSubresource ) + && ( srcOffset == rhs.srcOffset ) + && ( dstSubresource == rhs.dstSubresource ) + && ( dstOffset == rhs.dstOffset ) + && ( extent == rhs.extent ); + } + + bool operator!=( ImageCopy const& rhs ) const + { + return !operator==( rhs ); + } + + ImageSubresourceLayers srcSubresource; + Offset3D srcOffset; + ImageSubresourceLayers dstSubresource; + Offset3D dstOffset; + Extent3D extent; + }; + static_assert( sizeof( ImageCopy ) == sizeof( VkImageCopy ), "struct and wrapper have different size!" 
);
+
+  struct ImageBlit
+  {
+    ImageBlit( ImageSubresourceLayers srcSubresource_ = ImageSubresourceLayers(),
+               std::array<Offset3D, 2> const& srcOffsets_ = { { Offset3D(), Offset3D() } },
+               ImageSubresourceLayers dstSubresource_ = ImageSubresourceLayers(),
+               std::array<Offset3D, 2> const& dstOffsets_ = { { Offset3D(), Offset3D() } } )
+      : srcSubresource( srcSubresource_ )
+      , dstSubresource( dstSubresource_ )
+    {
+      memcpy( &srcOffsets, srcOffsets_.data(), 2 * sizeof( Offset3D ) );
+      memcpy( &dstOffsets, dstOffsets_.data(), 2 * sizeof( Offset3D ) );
+    }
+
+    ImageBlit( VkImageBlit const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ImageBlit ) );
+    }
+
+    ImageBlit& operator=( VkImageBlit const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ImageBlit ) );
+      return *this;
+    }
+    ImageBlit& setSrcSubresource( ImageSubresourceLayers srcSubresource_ )
+    {
+      srcSubresource = srcSubresource_;
+      return *this;
+    }
+
+    ImageBlit& setSrcOffsets( std::array<Offset3D, 2> srcOffsets_ )
+    {
+      memcpy( &srcOffsets, srcOffsets_.data(), 2 * sizeof( Offset3D ) );
+      return *this;
+    }
+
+    ImageBlit& setDstSubresource( ImageSubresourceLayers dstSubresource_ )
+    {
+      dstSubresource = dstSubresource_;
+      return *this;
+    }
+
+    ImageBlit& setDstOffsets( std::array<Offset3D, 2> dstOffsets_ )
+    {
+      memcpy( &dstOffsets, dstOffsets_.data(), 2 * sizeof( Offset3D ) );
+      return *this;
+    }
+
+    operator VkImageBlit const&() const
+    {
+      return *reinterpret_cast<const VkImageBlit*>(this);
+    }
+
+    operator VkImageBlit &()
+    {
+      return *reinterpret_cast<VkImageBlit*>(this);
+    }
+
+    bool operator==( ImageBlit const& rhs ) const
+    {
+      return ( srcSubresource == rhs.srcSubresource )
+          && ( memcmp( srcOffsets, rhs.srcOffsets, 2 * sizeof( Offset3D ) ) == 0 )
+          && ( dstSubresource == rhs.dstSubresource )
+          && ( memcmp( dstOffsets, rhs.dstOffsets, 2 * sizeof( Offset3D ) ) == 0 );
+    }
+
+    bool operator!=( ImageBlit const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    ImageSubresourceLayers srcSubresource;
+    Offset3D srcOffsets[2];
+    ImageSubresourceLayers dstSubresource;
+    Offset3D dstOffsets[2];
+  };
+  static_assert( sizeof( ImageBlit ) == sizeof( VkImageBlit ), "struct and wrapper have different size!"
); + + struct BufferImageCopy + { + BufferImageCopy( DeviceSize bufferOffset_ = 0, + uint32_t bufferRowLength_ = 0, + uint32_t bufferImageHeight_ = 0, + ImageSubresourceLayers imageSubresource_ = ImageSubresourceLayers(), + Offset3D imageOffset_ = Offset3D(), + Extent3D imageExtent_ = Extent3D() ) + : bufferOffset( bufferOffset_ ) + , bufferRowLength( bufferRowLength_ ) + , bufferImageHeight( bufferImageHeight_ ) + , imageSubresource( imageSubresource_ ) + , imageOffset( imageOffset_ ) + , imageExtent( imageExtent_ ) + { + } + + BufferImageCopy( VkBufferImageCopy const & rhs ) + { + memcpy( this, &rhs, sizeof( BufferImageCopy ) ); + } + + BufferImageCopy& operator=( VkBufferImageCopy const & rhs ) + { + memcpy( this, &rhs, sizeof( BufferImageCopy ) ); + return *this; + } + BufferImageCopy& setBufferOffset( DeviceSize bufferOffset_ ) + { + bufferOffset = bufferOffset_; + return *this; + } + + BufferImageCopy& setBufferRowLength( uint32_t bufferRowLength_ ) + { + bufferRowLength = bufferRowLength_; + return *this; + } + + BufferImageCopy& setBufferImageHeight( uint32_t bufferImageHeight_ ) + { + bufferImageHeight = bufferImageHeight_; + return *this; + } + + BufferImageCopy& setImageSubresource( ImageSubresourceLayers imageSubresource_ ) + { + imageSubresource = imageSubresource_; + return *this; + } + + BufferImageCopy& setImageOffset( Offset3D imageOffset_ ) + { + imageOffset = imageOffset_; + return *this; + } + + BufferImageCopy& setImageExtent( Extent3D imageExtent_ ) + { + imageExtent = imageExtent_; + return *this; + } + + operator VkBufferImageCopy const&() const + { + return *reinterpret_cast(this); + } + + operator VkBufferImageCopy &() + { + return *reinterpret_cast(this); + } + + bool operator==( BufferImageCopy const& rhs ) const + { + return ( bufferOffset == rhs.bufferOffset ) + && ( bufferRowLength == rhs.bufferRowLength ) + && ( bufferImageHeight == rhs.bufferImageHeight ) + && ( imageSubresource == rhs.imageSubresource ) + && ( imageOffset == rhs.imageOffset ) + && ( imageExtent == rhs.imageExtent ); + } + + bool operator!=( BufferImageCopy const& rhs ) const + { + return !operator==( rhs ); + } + + DeviceSize bufferOffset; + uint32_t bufferRowLength; + uint32_t bufferImageHeight; + ImageSubresourceLayers imageSubresource; + Offset3D imageOffset; + Extent3D imageExtent; + }; + static_assert( sizeof( BufferImageCopy ) == sizeof( VkBufferImageCopy ), "struct and wrapper have different size!" 
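+
+  // Editorial usage sketch (not part of the original header): copying a tightly
+  // packed staging buffer into mip 0 of a 2D image; bufferRowLength and
+  // bufferImageHeight of 0 mean "tightly packed". Assumes 'commandBuffer',
+  // 'stagingBuffer', 'image', 'width' and 'height' exist and the image is
+  // already in eTransferDstOptimal layout.
+  //
+  //   vk::BufferImageCopy region( 0, 0, 0,
+  //                               vk::ImageSubresourceLayers( vk::ImageAspectFlagBits::eColor, 0, 0, 1 ),
+  //                               vk::Offset3D( 0, 0, 0 ), vk::Extent3D( width, height, 1 ) );
+  //   commandBuffer.copyBufferToImage( stagingBuffer, image, vk::ImageLayout::eTransferDstOptimal, region );
+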
); + + struct ImageResolve + { + ImageResolve( ImageSubresourceLayers srcSubresource_ = ImageSubresourceLayers(), + Offset3D srcOffset_ = Offset3D(), + ImageSubresourceLayers dstSubresource_ = ImageSubresourceLayers(), + Offset3D dstOffset_ = Offset3D(), + Extent3D extent_ = Extent3D() ) + : srcSubresource( srcSubresource_ ) + , srcOffset( srcOffset_ ) + , dstSubresource( dstSubresource_ ) + , dstOffset( dstOffset_ ) + , extent( extent_ ) + { + } + + ImageResolve( VkImageResolve const & rhs ) + { + memcpy( this, &rhs, sizeof( ImageResolve ) ); + } + + ImageResolve& operator=( VkImageResolve const & rhs ) + { + memcpy( this, &rhs, sizeof( ImageResolve ) ); + return *this; + } + ImageResolve& setSrcSubresource( ImageSubresourceLayers srcSubresource_ ) + { + srcSubresource = srcSubresource_; + return *this; + } + + ImageResolve& setSrcOffset( Offset3D srcOffset_ ) + { + srcOffset = srcOffset_; + return *this; + } + + ImageResolve& setDstSubresource( ImageSubresourceLayers dstSubresource_ ) + { + dstSubresource = dstSubresource_; + return *this; + } + + ImageResolve& setDstOffset( Offset3D dstOffset_ ) + { + dstOffset = dstOffset_; + return *this; + } + + ImageResolve& setExtent( Extent3D extent_ ) + { + extent = extent_; + return *this; + } + + operator VkImageResolve const&() const + { + return *reinterpret_cast(this); + } + + operator VkImageResolve &() + { + return *reinterpret_cast(this); + } + + bool operator==( ImageResolve const& rhs ) const + { + return ( srcSubresource == rhs.srcSubresource ) + && ( srcOffset == rhs.srcOffset ) + && ( dstSubresource == rhs.dstSubresource ) + && ( dstOffset == rhs.dstOffset ) + && ( extent == rhs.extent ); + } + + bool operator!=( ImageResolve const& rhs ) const + { + return !operator==( rhs ); + } + + ImageSubresourceLayers srcSubresource; + Offset3D srcOffset; + ImageSubresourceLayers dstSubresource; + Offset3D dstOffset; + Extent3D extent; + }; + static_assert( sizeof( ImageResolve ) == sizeof( VkImageResolve ), "struct and wrapper have different size!" ); + + struct ClearAttachment + { + ClearAttachment( ImageAspectFlags aspectMask_ = ImageAspectFlags(), + uint32_t colorAttachment_ = 0, + ClearValue clearValue_ = ClearValue() ) + : aspectMask( aspectMask_ ) + , colorAttachment( colorAttachment_ ) + , clearValue( clearValue_ ) + { + } + + ClearAttachment( VkClearAttachment const & rhs ) + { + memcpy( this, &rhs, sizeof( ClearAttachment ) ); + } + + ClearAttachment& operator=( VkClearAttachment const & rhs ) + { + memcpy( this, &rhs, sizeof( ClearAttachment ) ); + return *this; + } + ClearAttachment& setAspectMask( ImageAspectFlags aspectMask_ ) + { + aspectMask = aspectMask_; + return *this; + } + + ClearAttachment& setColorAttachment( uint32_t colorAttachment_ ) + { + colorAttachment = colorAttachment_; + return *this; + } + + ClearAttachment& setClearValue( ClearValue clearValue_ ) + { + clearValue = clearValue_; + return *this; + } + + operator VkClearAttachment const&() const + { + return *reinterpret_cast(this); + } + + operator VkClearAttachment &() + { + return *reinterpret_cast(this); + } + + ImageAspectFlags aspectMask; + uint32_t colorAttachment; + ClearValue clearValue; + }; + static_assert( sizeof( ClearAttachment ) == sizeof( VkClearAttachment ), "struct and wrapper have different size!" 
); + + struct InputAttachmentAspectReference + { + InputAttachmentAspectReference( uint32_t subpass_ = 0, + uint32_t inputAttachmentIndex_ = 0, + ImageAspectFlags aspectMask_ = ImageAspectFlags() ) + : subpass( subpass_ ) + , inputAttachmentIndex( inputAttachmentIndex_ ) + , aspectMask( aspectMask_ ) + { + } + + InputAttachmentAspectReference( VkInputAttachmentAspectReference const & rhs ) + { + memcpy( this, &rhs, sizeof( InputAttachmentAspectReference ) ); + } + + InputAttachmentAspectReference& operator=( VkInputAttachmentAspectReference const & rhs ) + { + memcpy( this, &rhs, sizeof( InputAttachmentAspectReference ) ); + return *this; + } + InputAttachmentAspectReference& setSubpass( uint32_t subpass_ ) + { + subpass = subpass_; + return *this; + } + + InputAttachmentAspectReference& setInputAttachmentIndex( uint32_t inputAttachmentIndex_ ) + { + inputAttachmentIndex = inputAttachmentIndex_; + return *this; + } + + InputAttachmentAspectReference& setAspectMask( ImageAspectFlags aspectMask_ ) + { + aspectMask = aspectMask_; + return *this; + } + + operator VkInputAttachmentAspectReference const&() const + { + return *reinterpret_cast(this); + } + + operator VkInputAttachmentAspectReference &() + { + return *reinterpret_cast(this); + } + + bool operator==( InputAttachmentAspectReference const& rhs ) const + { + return ( subpass == rhs.subpass ) + && ( inputAttachmentIndex == rhs.inputAttachmentIndex ) + && ( aspectMask == rhs.aspectMask ); + } + + bool operator!=( InputAttachmentAspectReference const& rhs ) const + { + return !operator==( rhs ); + } + + uint32_t subpass; + uint32_t inputAttachmentIndex; + ImageAspectFlags aspectMask; + }; + static_assert( sizeof( InputAttachmentAspectReference ) == sizeof( VkInputAttachmentAspectReference ), "struct and wrapper have different size!" 
); + + using InputAttachmentAspectReferenceKHR = InputAttachmentAspectReference; + + struct RenderPassInputAttachmentAspectCreateInfo + { + RenderPassInputAttachmentAspectCreateInfo( uint32_t aspectReferenceCount_ = 0, + const InputAttachmentAspectReference* pAspectReferences_ = nullptr ) + : aspectReferenceCount( aspectReferenceCount_ ) + , pAspectReferences( pAspectReferences_ ) + { + } + + RenderPassInputAttachmentAspectCreateInfo( VkRenderPassInputAttachmentAspectCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( RenderPassInputAttachmentAspectCreateInfo ) ); + } + + RenderPassInputAttachmentAspectCreateInfo& operator=( VkRenderPassInputAttachmentAspectCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( RenderPassInputAttachmentAspectCreateInfo ) ); + return *this; + } + RenderPassInputAttachmentAspectCreateInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + RenderPassInputAttachmentAspectCreateInfo& setAspectReferenceCount( uint32_t aspectReferenceCount_ ) + { + aspectReferenceCount = aspectReferenceCount_; + return *this; + } + + RenderPassInputAttachmentAspectCreateInfo& setPAspectReferences( const InputAttachmentAspectReference* pAspectReferences_ ) + { + pAspectReferences = pAspectReferences_; + return *this; + } + + operator VkRenderPassInputAttachmentAspectCreateInfo const&() const + { + return *reinterpret_cast(this); + } + + operator VkRenderPassInputAttachmentAspectCreateInfo &() + { + return *reinterpret_cast(this); + } + + bool operator==( RenderPassInputAttachmentAspectCreateInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( aspectReferenceCount == rhs.aspectReferenceCount ) + && ( pAspectReferences == rhs.pAspectReferences ); + } + + bool operator!=( RenderPassInputAttachmentAspectCreateInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eRenderPassInputAttachmentAspectCreateInfo; + + public: + const void* pNext = nullptr; + uint32_t aspectReferenceCount; + const InputAttachmentAspectReference* pAspectReferences; + }; + static_assert( sizeof( RenderPassInputAttachmentAspectCreateInfo ) == sizeof( VkRenderPassInputAttachmentAspectCreateInfo ), "struct and wrapper have different size!" 
); + + using RenderPassInputAttachmentAspectCreateInfoKHR = RenderPassInputAttachmentAspectCreateInfo; + + struct BindImagePlaneMemoryInfo + { + BindImagePlaneMemoryInfo( ImageAspectFlagBits planeAspect_ = ImageAspectFlagBits::eColor ) + : planeAspect( planeAspect_ ) + { + } + + BindImagePlaneMemoryInfo( VkBindImagePlaneMemoryInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( BindImagePlaneMemoryInfo ) ); + } + + BindImagePlaneMemoryInfo& operator=( VkBindImagePlaneMemoryInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( BindImagePlaneMemoryInfo ) ); + return *this; + } + BindImagePlaneMemoryInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + BindImagePlaneMemoryInfo& setPlaneAspect( ImageAspectFlagBits planeAspect_ ) + { + planeAspect = planeAspect_; + return *this; + } + + operator VkBindImagePlaneMemoryInfo const&() const + { + return *reinterpret_cast(this); + } + + operator VkBindImagePlaneMemoryInfo &() + { + return *reinterpret_cast(this); + } + + bool operator==( BindImagePlaneMemoryInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( planeAspect == rhs.planeAspect ); + } + + bool operator!=( BindImagePlaneMemoryInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eBindImagePlaneMemoryInfo; + + public: + const void* pNext = nullptr; + ImageAspectFlagBits planeAspect; + }; + static_assert( sizeof( BindImagePlaneMemoryInfo ) == sizeof( VkBindImagePlaneMemoryInfo ), "struct and wrapper have different size!" ); + + using BindImagePlaneMemoryInfoKHR = BindImagePlaneMemoryInfo; + + struct ImagePlaneMemoryRequirementsInfo + { + ImagePlaneMemoryRequirementsInfo( ImageAspectFlagBits planeAspect_ = ImageAspectFlagBits::eColor ) + : planeAspect( planeAspect_ ) + { + } + + ImagePlaneMemoryRequirementsInfo( VkImagePlaneMemoryRequirementsInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( ImagePlaneMemoryRequirementsInfo ) ); + } + + ImagePlaneMemoryRequirementsInfo& operator=( VkImagePlaneMemoryRequirementsInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( ImagePlaneMemoryRequirementsInfo ) ); + return *this; + } + ImagePlaneMemoryRequirementsInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + ImagePlaneMemoryRequirementsInfo& setPlaneAspect( ImageAspectFlagBits planeAspect_ ) + { + planeAspect = planeAspect_; + return *this; + } + + operator VkImagePlaneMemoryRequirementsInfo const&() const + { + return *reinterpret_cast(this); + } + + operator VkImagePlaneMemoryRequirementsInfo &() + { + return *reinterpret_cast(this); + } + + bool operator==( ImagePlaneMemoryRequirementsInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( planeAspect == rhs.planeAspect ); + } + + bool operator!=( ImagePlaneMemoryRequirementsInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eImagePlaneMemoryRequirementsInfo; + + public: + const void* pNext = nullptr; + ImageAspectFlagBits planeAspect; + }; + static_assert( sizeof( ImagePlaneMemoryRequirementsInfo ) == sizeof( VkImagePlaneMemoryRequirementsInfo ), "struct and wrapper have different size!" 
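+
+  // Editorial note (not part of the original header): for disjoint multi-planar
+  // images, one BindImagePlaneMemoryInfo per plane is chained into the pNext of
+  // the corresponding BindImageMemoryInfo passed to bindImageMemory2, and
+  // ImagePlaneMemoryRequirementsInfo is likewise chained into
+  // ImageMemoryRequirementsInfo2 to query a single plane. Illustrative only:
+  //
+  //   vk::BindImagePlaneMemoryInfo planeInfo( vk::ImageAspectFlagBits::ePlane0 );
+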
); + + using ImagePlaneMemoryRequirementsInfoKHR = ImagePlaneMemoryRequirementsInfo; + + struct AttachmentReference2KHR + { + AttachmentReference2KHR( uint32_t attachment_ = 0, + ImageLayout layout_ = ImageLayout::eUndefined, + ImageAspectFlags aspectMask_ = ImageAspectFlags() ) + : attachment( attachment_ ) + , layout( layout_ ) + , aspectMask( aspectMask_ ) + { + } + + AttachmentReference2KHR( VkAttachmentReference2KHR const & rhs ) + { + memcpy( this, &rhs, sizeof( AttachmentReference2KHR ) ); + } + + AttachmentReference2KHR& operator=( VkAttachmentReference2KHR const & rhs ) + { + memcpy( this, &rhs, sizeof( AttachmentReference2KHR ) ); + return *this; + } + AttachmentReference2KHR& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + AttachmentReference2KHR& setAttachment( uint32_t attachment_ ) + { + attachment = attachment_; + return *this; + } + + AttachmentReference2KHR& setLayout( ImageLayout layout_ ) + { + layout = layout_; + return *this; + } + + AttachmentReference2KHR& setAspectMask( ImageAspectFlags aspectMask_ ) + { + aspectMask = aspectMask_; + return *this; + } + + operator VkAttachmentReference2KHR const&() const + { + return *reinterpret_cast(this); + } + + operator VkAttachmentReference2KHR &() + { + return *reinterpret_cast(this); + } + + bool operator==( AttachmentReference2KHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( attachment == rhs.attachment ) + && ( layout == rhs.layout ) + && ( aspectMask == rhs.aspectMask ); + } + + bool operator!=( AttachmentReference2KHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eAttachmentReference2KHR; + + public: + const void* pNext = nullptr; + uint32_t attachment; + ImageLayout layout; + ImageAspectFlags aspectMask; + }; + static_assert( sizeof( AttachmentReference2KHR ) == sizeof( VkAttachmentReference2KHR ), "struct and wrapper have different size!" 
); + + enum class SparseImageFormatFlagBits + { + eSingleMiptail = VK_SPARSE_IMAGE_FORMAT_SINGLE_MIPTAIL_BIT, + eAlignedMipSize = VK_SPARSE_IMAGE_FORMAT_ALIGNED_MIP_SIZE_BIT, + eNonstandardBlockSize = VK_SPARSE_IMAGE_FORMAT_NONSTANDARD_BLOCK_SIZE_BIT + }; + + using SparseImageFormatFlags = Flags; + + VULKAN_HPP_INLINE SparseImageFormatFlags operator|( SparseImageFormatFlagBits bit0, SparseImageFormatFlagBits bit1 ) + { + return SparseImageFormatFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE SparseImageFormatFlags operator~( SparseImageFormatFlagBits bits ) + { + return ~( SparseImageFormatFlags( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(SparseImageFormatFlagBits::eSingleMiptail) | VkFlags(SparseImageFormatFlagBits::eAlignedMipSize) | VkFlags(SparseImageFormatFlagBits::eNonstandardBlockSize) + }; + }; + + struct SparseImageFormatProperties + { + operator VkSparseImageFormatProperties const&() const + { + return *reinterpret_cast(this); + } + + operator VkSparseImageFormatProperties &() + { + return *reinterpret_cast(this); + } + + bool operator==( SparseImageFormatProperties const& rhs ) const + { + return ( aspectMask == rhs.aspectMask ) + && ( imageGranularity == rhs.imageGranularity ) + && ( flags == rhs.flags ); + } + + bool operator!=( SparseImageFormatProperties const& rhs ) const + { + return !operator==( rhs ); + } + + ImageAspectFlags aspectMask; + Extent3D imageGranularity; + SparseImageFormatFlags flags; + }; + static_assert( sizeof( SparseImageFormatProperties ) == sizeof( VkSparseImageFormatProperties ), "struct and wrapper have different size!" ); + + struct SparseImageMemoryRequirements + { + operator VkSparseImageMemoryRequirements const&() const + { + return *reinterpret_cast(this); + } + + operator VkSparseImageMemoryRequirements &() + { + return *reinterpret_cast(this); + } + + bool operator==( SparseImageMemoryRequirements const& rhs ) const + { + return ( formatProperties == rhs.formatProperties ) + && ( imageMipTailFirstLod == rhs.imageMipTailFirstLod ) + && ( imageMipTailSize == rhs.imageMipTailSize ) + && ( imageMipTailOffset == rhs.imageMipTailOffset ) + && ( imageMipTailStride == rhs.imageMipTailStride ); + } + + bool operator!=( SparseImageMemoryRequirements const& rhs ) const + { + return !operator==( rhs ); + } + + SparseImageFormatProperties formatProperties; + uint32_t imageMipTailFirstLod; + DeviceSize imageMipTailSize; + DeviceSize imageMipTailOffset; + DeviceSize imageMipTailStride; + }; + static_assert( sizeof( SparseImageMemoryRequirements ) == sizeof( VkSparseImageMemoryRequirements ), "struct and wrapper have different size!" ); + + struct SparseImageFormatProperties2 + { + operator VkSparseImageFormatProperties2 const&() const + { + return *reinterpret_cast(this); + } + + operator VkSparseImageFormatProperties2 &() + { + return *reinterpret_cast(this); + } + + bool operator==( SparseImageFormatProperties2 const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( properties == rhs.properties ); + } + + bool operator!=( SparseImageFormatProperties2 const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eSparseImageFormatProperties2; + + public: + void* pNext = nullptr; + SparseImageFormatProperties properties; + }; + static_assert( sizeof( SparseImageFormatProperties2 ) == sizeof( VkSparseImageFormatProperties2 ), "struct and wrapper have different size!" 
); + + using SparseImageFormatProperties2KHR = SparseImageFormatProperties2; + + struct SparseImageMemoryRequirements2 + { + operator VkSparseImageMemoryRequirements2 const&() const + { + return *reinterpret_cast(this); + } + + operator VkSparseImageMemoryRequirements2 &() + { + return *reinterpret_cast(this); + } + + bool operator==( SparseImageMemoryRequirements2 const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( memoryRequirements == rhs.memoryRequirements ); + } + + bool operator!=( SparseImageMemoryRequirements2 const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eSparseImageMemoryRequirements2; + + public: + void* pNext = nullptr; + SparseImageMemoryRequirements memoryRequirements; + }; + static_assert( sizeof( SparseImageMemoryRequirements2 ) == sizeof( VkSparseImageMemoryRequirements2 ), "struct and wrapper have different size!" ); + + using SparseImageMemoryRequirements2KHR = SparseImageMemoryRequirements2; + + enum class SparseMemoryBindFlagBits + { + eMetadata = VK_SPARSE_MEMORY_BIND_METADATA_BIT + }; + + using SparseMemoryBindFlags = Flags; + + VULKAN_HPP_INLINE SparseMemoryBindFlags operator|( SparseMemoryBindFlagBits bit0, SparseMemoryBindFlagBits bit1 ) + { + return SparseMemoryBindFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE SparseMemoryBindFlags operator~( SparseMemoryBindFlagBits bits ) + { + return ~( SparseMemoryBindFlags( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(SparseMemoryBindFlagBits::eMetadata) + }; + }; + + struct SparseMemoryBind + { + SparseMemoryBind( DeviceSize resourceOffset_ = 0, + DeviceSize size_ = 0, + DeviceMemory memory_ = DeviceMemory(), + DeviceSize memoryOffset_ = 0, + SparseMemoryBindFlags flags_ = SparseMemoryBindFlags() ) + : resourceOffset( resourceOffset_ ) + , size( size_ ) + , memory( memory_ ) + , memoryOffset( memoryOffset_ ) + , flags( flags_ ) + { + } + + SparseMemoryBind( VkSparseMemoryBind const & rhs ) + { + memcpy( this, &rhs, sizeof( SparseMemoryBind ) ); + } + + SparseMemoryBind& operator=( VkSparseMemoryBind const & rhs ) + { + memcpy( this, &rhs, sizeof( SparseMemoryBind ) ); + return *this; + } + SparseMemoryBind& setResourceOffset( DeviceSize resourceOffset_ ) + { + resourceOffset = resourceOffset_; + return *this; + } + + SparseMemoryBind& setSize( DeviceSize size_ ) + { + size = size_; + return *this; + } + + SparseMemoryBind& setMemory( DeviceMemory memory_ ) + { + memory = memory_; + return *this; + } + + SparseMemoryBind& setMemoryOffset( DeviceSize memoryOffset_ ) + { + memoryOffset = memoryOffset_; + return *this; + } + + SparseMemoryBind& setFlags( SparseMemoryBindFlags flags_ ) + { + flags = flags_; + return *this; + } + + operator VkSparseMemoryBind const&() const + { + return *reinterpret_cast(this); + } + + operator VkSparseMemoryBind &() + { + return *reinterpret_cast(this); + } + + bool operator==( SparseMemoryBind const& rhs ) const + { + return ( resourceOffset == rhs.resourceOffset ) + && ( size == rhs.size ) + && ( memory == rhs.memory ) + && ( memoryOffset == rhs.memoryOffset ) + && ( flags == rhs.flags ); + } + + bool operator!=( SparseMemoryBind const& rhs ) const + { + return !operator==( rhs ); + } + + DeviceSize resourceOffset; + DeviceSize size; + DeviceMemory memory; + DeviceSize memoryOffset; + SparseMemoryBindFlags flags; + }; + static_assert( sizeof( SparseMemoryBind ) == sizeof( VkSparseMemoryBind ), "struct and wrapper have different size!" 
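+  // Illustrative sketch of a single opaque bind (assumes a vk::DeviceMemory
+  // `memory` and a vk::DeviceSize `size` already exist):
+  //
+  //   vk::SparseMemoryBind bind = vk::SparseMemoryBind()
+  //     .setResourceOffset( 0 )
+  //     .setSize( size )
+  //     .setMemory( memory )
+  //     .setMemoryOffset( 0 );
+  //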
); + + struct SparseImageMemoryBind + { + SparseImageMemoryBind( ImageSubresource subresource_ = ImageSubresource(), + Offset3D offset_ = Offset3D(), + Extent3D extent_ = Extent3D(), + DeviceMemory memory_ = DeviceMemory(), + DeviceSize memoryOffset_ = 0, + SparseMemoryBindFlags flags_ = SparseMemoryBindFlags() ) + : subresource( subresource_ ) + , offset( offset_ ) + , extent( extent_ ) + , memory( memory_ ) + , memoryOffset( memoryOffset_ ) + , flags( flags_ ) + { + } + + SparseImageMemoryBind( VkSparseImageMemoryBind const & rhs ) + { + memcpy( this, &rhs, sizeof( SparseImageMemoryBind ) ); + } + + SparseImageMemoryBind& operator=( VkSparseImageMemoryBind const & rhs ) + { + memcpy( this, &rhs, sizeof( SparseImageMemoryBind ) ); + return *this; + } + SparseImageMemoryBind& setSubresource( ImageSubresource subresource_ ) + { + subresource = subresource_; + return *this; + } + + SparseImageMemoryBind& setOffset( Offset3D offset_ ) + { + offset = offset_; + return *this; + } + + SparseImageMemoryBind& setExtent( Extent3D extent_ ) + { + extent = extent_; + return *this; + } + + SparseImageMemoryBind& setMemory( DeviceMemory memory_ ) + { + memory = memory_; + return *this; + } + + SparseImageMemoryBind& setMemoryOffset( DeviceSize memoryOffset_ ) + { + memoryOffset = memoryOffset_; + return *this; + } + + SparseImageMemoryBind& setFlags( SparseMemoryBindFlags flags_ ) + { + flags = flags_; + return *this; + } + + operator VkSparseImageMemoryBind const&() const + { + return *reinterpret_cast(this); + } + + operator VkSparseImageMemoryBind &() + { + return *reinterpret_cast(this); + } + + bool operator==( SparseImageMemoryBind const& rhs ) const + { + return ( subresource == rhs.subresource ) + && ( offset == rhs.offset ) + && ( extent == rhs.extent ) + && ( memory == rhs.memory ) + && ( memoryOffset == rhs.memoryOffset ) + && ( flags == rhs.flags ); + } + + bool operator!=( SparseImageMemoryBind const& rhs ) const + { + return !operator==( rhs ); + } + + ImageSubresource subresource; + Offset3D offset; + Extent3D extent; + DeviceMemory memory; + DeviceSize memoryOffset; + SparseMemoryBindFlags flags; + }; + static_assert( sizeof( SparseImageMemoryBind ) == sizeof( VkSparseImageMemoryBind ), "struct and wrapper have different size!" 
); + + struct SparseBufferMemoryBindInfo + { + SparseBufferMemoryBindInfo( Buffer buffer_ = Buffer(), + uint32_t bindCount_ = 0, + const SparseMemoryBind* pBinds_ = nullptr ) + : buffer( buffer_ ) + , bindCount( bindCount_ ) + , pBinds( pBinds_ ) + { + } + + SparseBufferMemoryBindInfo( VkSparseBufferMemoryBindInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( SparseBufferMemoryBindInfo ) ); + } + + SparseBufferMemoryBindInfo& operator=( VkSparseBufferMemoryBindInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( SparseBufferMemoryBindInfo ) ); + return *this; + } + SparseBufferMemoryBindInfo& setBuffer( Buffer buffer_ ) + { + buffer = buffer_; + return *this; + } + + SparseBufferMemoryBindInfo& setBindCount( uint32_t bindCount_ ) + { + bindCount = bindCount_; + return *this; + } + + SparseBufferMemoryBindInfo& setPBinds( const SparseMemoryBind* pBinds_ ) + { + pBinds = pBinds_; + return *this; + } + + operator VkSparseBufferMemoryBindInfo const&() const + { + return *reinterpret_cast(this); + } + + operator VkSparseBufferMemoryBindInfo &() + { + return *reinterpret_cast(this); + } + + bool operator==( SparseBufferMemoryBindInfo const& rhs ) const + { + return ( buffer == rhs.buffer ) + && ( bindCount == rhs.bindCount ) + && ( pBinds == rhs.pBinds ); + } + + bool operator!=( SparseBufferMemoryBindInfo const& rhs ) const + { + return !operator==( rhs ); + } + + Buffer buffer; + uint32_t bindCount; + const SparseMemoryBind* pBinds; + }; + static_assert( sizeof( SparseBufferMemoryBindInfo ) == sizeof( VkSparseBufferMemoryBindInfo ), "struct and wrapper have different size!" ); + + struct SparseImageOpaqueMemoryBindInfo + { + SparseImageOpaqueMemoryBindInfo( Image image_ = Image(), + uint32_t bindCount_ = 0, + const SparseMemoryBind* pBinds_ = nullptr ) + : image( image_ ) + , bindCount( bindCount_ ) + , pBinds( pBinds_ ) + { + } + + SparseImageOpaqueMemoryBindInfo( VkSparseImageOpaqueMemoryBindInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( SparseImageOpaqueMemoryBindInfo ) ); + } + + SparseImageOpaqueMemoryBindInfo& operator=( VkSparseImageOpaqueMemoryBindInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( SparseImageOpaqueMemoryBindInfo ) ); + return *this; + } + SparseImageOpaqueMemoryBindInfo& setImage( Image image_ ) + { + image = image_; + return *this; + } + + SparseImageOpaqueMemoryBindInfo& setBindCount( uint32_t bindCount_ ) + { + bindCount = bindCount_; + return *this; + } + + SparseImageOpaqueMemoryBindInfo& setPBinds( const SparseMemoryBind* pBinds_ ) + { + pBinds = pBinds_; + return *this; + } + + operator VkSparseImageOpaqueMemoryBindInfo const&() const + { + return *reinterpret_cast(this); + } + + operator VkSparseImageOpaqueMemoryBindInfo &() + { + return *reinterpret_cast(this); + } + + bool operator==( SparseImageOpaqueMemoryBindInfo const& rhs ) const + { + return ( image == rhs.image ) + && ( bindCount == rhs.bindCount ) + && ( pBinds == rhs.pBinds ); + } + + bool operator!=( SparseImageOpaqueMemoryBindInfo const& rhs ) const + { + return !operator==( rhs ); + } + + Image image; + uint32_t bindCount; + const SparseMemoryBind* pBinds; + }; + static_assert( sizeof( SparseImageOpaqueMemoryBindInfo ) == sizeof( VkSparseImageOpaqueMemoryBindInfo ), "struct and wrapper have different size!" 
); + + struct SparseImageMemoryBindInfo + { + SparseImageMemoryBindInfo( Image image_ = Image(), + uint32_t bindCount_ = 0, + const SparseImageMemoryBind* pBinds_ = nullptr ) + : image( image_ ) + , bindCount( bindCount_ ) + , pBinds( pBinds_ ) + { + } + + SparseImageMemoryBindInfo( VkSparseImageMemoryBindInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( SparseImageMemoryBindInfo ) ); + } + + SparseImageMemoryBindInfo& operator=( VkSparseImageMemoryBindInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( SparseImageMemoryBindInfo ) ); + return *this; + } + SparseImageMemoryBindInfo& setImage( Image image_ ) + { + image = image_; + return *this; + } + + SparseImageMemoryBindInfo& setBindCount( uint32_t bindCount_ ) + { + bindCount = bindCount_; + return *this; + } + + SparseImageMemoryBindInfo& setPBinds( const SparseImageMemoryBind* pBinds_ ) + { + pBinds = pBinds_; + return *this; + } + + operator VkSparseImageMemoryBindInfo const&() const + { + return *reinterpret_cast(this); + } + + operator VkSparseImageMemoryBindInfo &() + { + return *reinterpret_cast(this); + } + + bool operator==( SparseImageMemoryBindInfo const& rhs ) const + { + return ( image == rhs.image ) + && ( bindCount == rhs.bindCount ) + && ( pBinds == rhs.pBinds ); + } + + bool operator!=( SparseImageMemoryBindInfo const& rhs ) const + { + return !operator==( rhs ); + } + + Image image; + uint32_t bindCount; + const SparseImageMemoryBind* pBinds; + }; + static_assert( sizeof( SparseImageMemoryBindInfo ) == sizeof( VkSparseImageMemoryBindInfo ), "struct and wrapper have different size!" ); + + struct BindSparseInfo + { + BindSparseInfo( uint32_t waitSemaphoreCount_ = 0, + const Semaphore* pWaitSemaphores_ = nullptr, + uint32_t bufferBindCount_ = 0, + const SparseBufferMemoryBindInfo* pBufferBinds_ = nullptr, + uint32_t imageOpaqueBindCount_ = 0, + const SparseImageOpaqueMemoryBindInfo* pImageOpaqueBinds_ = nullptr, + uint32_t imageBindCount_ = 0, + const SparseImageMemoryBindInfo* pImageBinds_ = nullptr, + uint32_t signalSemaphoreCount_ = 0, + const Semaphore* pSignalSemaphores_ = nullptr ) + : waitSemaphoreCount( waitSemaphoreCount_ ) + , pWaitSemaphores( pWaitSemaphores_ ) + , bufferBindCount( bufferBindCount_ ) + , pBufferBinds( pBufferBinds_ ) + , imageOpaqueBindCount( imageOpaqueBindCount_ ) + , pImageOpaqueBinds( pImageOpaqueBinds_ ) + , imageBindCount( imageBindCount_ ) + , pImageBinds( pImageBinds_ ) + , signalSemaphoreCount( signalSemaphoreCount_ ) + , pSignalSemaphores( pSignalSemaphores_ ) + { + } + + BindSparseInfo( VkBindSparseInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( BindSparseInfo ) ); + } + + BindSparseInfo& operator=( VkBindSparseInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( BindSparseInfo ) ); + return *this; + } + BindSparseInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + BindSparseInfo& setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ ) + { + waitSemaphoreCount = waitSemaphoreCount_; + return *this; + } + + BindSparseInfo& setPWaitSemaphores( const Semaphore* pWaitSemaphores_ ) + { + pWaitSemaphores = pWaitSemaphores_; + return *this; + } + + BindSparseInfo& setBufferBindCount( uint32_t bufferBindCount_ ) + { + bufferBindCount = bufferBindCount_; + return *this; + } + + BindSparseInfo& setPBufferBinds( const SparseBufferMemoryBindInfo* pBufferBinds_ ) + { + pBufferBinds = pBufferBinds_; + return *this; + } + + BindSparseInfo& setImageOpaqueBindCount( uint32_t imageOpaqueBindCount_ ) + { + imageOpaqueBindCount = imageOpaqueBindCount_; + return 
*this; + } + + BindSparseInfo& setPImageOpaqueBinds( const SparseImageOpaqueMemoryBindInfo* pImageOpaqueBinds_ ) + { + pImageOpaqueBinds = pImageOpaqueBinds_; + return *this; + } + + BindSparseInfo& setImageBindCount( uint32_t imageBindCount_ ) + { + imageBindCount = imageBindCount_; + return *this; + } + + BindSparseInfo& setPImageBinds( const SparseImageMemoryBindInfo* pImageBinds_ ) + { + pImageBinds = pImageBinds_; + return *this; + } + + BindSparseInfo& setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ ) + { + signalSemaphoreCount = signalSemaphoreCount_; + return *this; + } + + BindSparseInfo& setPSignalSemaphores( const Semaphore* pSignalSemaphores_ ) + { + pSignalSemaphores = pSignalSemaphores_; + return *this; + } + + operator VkBindSparseInfo const&() const + { + return *reinterpret_cast(this); + } + + operator VkBindSparseInfo &() + { + return *reinterpret_cast(this); + } + + bool operator==( BindSparseInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( waitSemaphoreCount == rhs.waitSemaphoreCount ) + && ( pWaitSemaphores == rhs.pWaitSemaphores ) + && ( bufferBindCount == rhs.bufferBindCount ) + && ( pBufferBinds == rhs.pBufferBinds ) + && ( imageOpaqueBindCount == rhs.imageOpaqueBindCount ) + && ( pImageOpaqueBinds == rhs.pImageOpaqueBinds ) + && ( imageBindCount == rhs.imageBindCount ) + && ( pImageBinds == rhs.pImageBinds ) + && ( signalSemaphoreCount == rhs.signalSemaphoreCount ) + && ( pSignalSemaphores == rhs.pSignalSemaphores ); + } + + bool operator!=( BindSparseInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eBindSparseInfo; + + public: + const void* pNext = nullptr; + uint32_t waitSemaphoreCount; + const Semaphore* pWaitSemaphores; + uint32_t bufferBindCount; + const SparseBufferMemoryBindInfo* pBufferBinds; + uint32_t imageOpaqueBindCount; + const SparseImageOpaqueMemoryBindInfo* pImageOpaqueBinds; + uint32_t imageBindCount; + const SparseImageMemoryBindInfo* pImageBinds; + uint32_t signalSemaphoreCount; + const Semaphore* pSignalSemaphores; + }; + static_assert( sizeof( BindSparseInfo ) == sizeof( VkBindSparseInfo ), "struct and wrapper have different size!" 
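+  // Illustrative sketch wiring a buffer bind into a sparse-binding submission
+  // (assumes vk::Queue `sparseQueue`, vk::Buffer `buffer`, vk::SparseMemoryBind
+  // `bind` and vk::Fence `fence` already exist):
+  //
+  //   vk::SparseBufferMemoryBindInfo bufferBind( buffer, 1, &bind );
+  //   vk::BindSparseInfo bindInfo = vk::BindSparseInfo()
+  //     .setBufferBindCount( 1 )
+  //     .setPBufferBinds( &bufferBind );
+  //   sparseQueue.bindSparse( bindInfo, fence );
+  //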
); + + enum class PipelineStageFlagBits + { + eTopOfPipe = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, + eDrawIndirect = VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT, + eVertexInput = VK_PIPELINE_STAGE_VERTEX_INPUT_BIT, + eVertexShader = VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, + eTessellationControlShader = VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT, + eTessellationEvaluationShader = VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT, + eGeometryShader = VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT, + eFragmentShader = VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, + eEarlyFragmentTests = VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT, + eLateFragmentTests = VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT, + eColorAttachmentOutput = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, + eComputeShader = VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, + eTransfer = VK_PIPELINE_STAGE_TRANSFER_BIT, + eBottomOfPipe = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, + eHost = VK_PIPELINE_STAGE_HOST_BIT, + eAllGraphics = VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, + eAllCommands = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, + eTransformFeedbackEXT = VK_PIPELINE_STAGE_TRANSFORM_FEEDBACK_BIT_EXT, + eConditionalRenderingEXT = VK_PIPELINE_STAGE_CONDITIONAL_RENDERING_BIT_EXT, + eCommandProcessNVX = VK_PIPELINE_STAGE_COMMAND_PROCESS_BIT_NVX, + eShadingRateImageNV = VK_PIPELINE_STAGE_SHADING_RATE_IMAGE_BIT_NV, + eRayTracingShaderNV = VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_NV, + eAccelerationStructureBuildNV = VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_NV, + eTaskShaderNV = VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV, + eMeshShaderNV = VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV + }; + + using PipelineStageFlags = Flags; + + VULKAN_HPP_INLINE PipelineStageFlags operator|( PipelineStageFlagBits bit0, PipelineStageFlagBits bit1 ) + { + return PipelineStageFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE PipelineStageFlags operator~( PipelineStageFlagBits bits ) + { + return ~( PipelineStageFlags( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(PipelineStageFlagBits::eTopOfPipe) | VkFlags(PipelineStageFlagBits::eDrawIndirect) | VkFlags(PipelineStageFlagBits::eVertexInput) | VkFlags(PipelineStageFlagBits::eVertexShader) | VkFlags(PipelineStageFlagBits::eTessellationControlShader) | VkFlags(PipelineStageFlagBits::eTessellationEvaluationShader) | VkFlags(PipelineStageFlagBits::eGeometryShader) | VkFlags(PipelineStageFlagBits::eFragmentShader) | VkFlags(PipelineStageFlagBits::eEarlyFragmentTests) | VkFlags(PipelineStageFlagBits::eLateFragmentTests) | VkFlags(PipelineStageFlagBits::eColorAttachmentOutput) | VkFlags(PipelineStageFlagBits::eComputeShader) | VkFlags(PipelineStageFlagBits::eTransfer) | VkFlags(PipelineStageFlagBits::eBottomOfPipe) | VkFlags(PipelineStageFlagBits::eHost) | VkFlags(PipelineStageFlagBits::eAllGraphics) | VkFlags(PipelineStageFlagBits::eAllCommands) | VkFlags(PipelineStageFlagBits::eTransformFeedbackEXT) | VkFlags(PipelineStageFlagBits::eConditionalRenderingEXT) | VkFlags(PipelineStageFlagBits::eCommandProcessNVX) | VkFlags(PipelineStageFlagBits::eShadingRateImageNV) | VkFlags(PipelineStageFlagBits::eRayTracingShaderNV) | VkFlags(PipelineStageFlagBits::eAccelerationStructureBuildNV) | VkFlags(PipelineStageFlagBits::eTaskShaderNV) | VkFlags(PipelineStageFlagBits::eMeshShaderNV) + }; + }; + + struct QueueFamilyCheckpointPropertiesNV + { + operator VkQueueFamilyCheckpointPropertiesNV const&() const + { + return *reinterpret_cast(this); + } + + operator VkQueueFamilyCheckpointPropertiesNV &() + { + return *reinterpret_cast(this); + } + + 
bool operator==( QueueFamilyCheckpointPropertiesNV const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( checkpointExecutionStageMask == rhs.checkpointExecutionStageMask ); + } + + bool operator!=( QueueFamilyCheckpointPropertiesNV const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eQueueFamilyCheckpointPropertiesNV; + + public: + void* pNext = nullptr; + PipelineStageFlags checkpointExecutionStageMask; + }; + static_assert( sizeof( QueueFamilyCheckpointPropertiesNV ) == sizeof( VkQueueFamilyCheckpointPropertiesNV ), "struct and wrapper have different size!" ); + + struct CheckpointDataNV + { + operator VkCheckpointDataNV const&() const + { + return *reinterpret_cast(this); + } + + operator VkCheckpointDataNV &() + { + return *reinterpret_cast(this); + } + + bool operator==( CheckpointDataNV const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( stage == rhs.stage ) + && ( pCheckpointMarker == rhs.pCheckpointMarker ); + } + + bool operator!=( CheckpointDataNV const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eCheckpointDataNV; + + public: + void* pNext = nullptr; + PipelineStageFlagBits stage; + void* pCheckpointMarker; + }; + static_assert( sizeof( CheckpointDataNV ) == sizeof( VkCheckpointDataNV ), "struct and wrapper have different size!" ); + + enum class CommandPoolCreateFlagBits + { + eTransient = VK_COMMAND_POOL_CREATE_TRANSIENT_BIT, + eResetCommandBuffer = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT, + eProtected = VK_COMMAND_POOL_CREATE_PROTECTED_BIT + }; + + using CommandPoolCreateFlags = Flags; + + VULKAN_HPP_INLINE CommandPoolCreateFlags operator|( CommandPoolCreateFlagBits bit0, CommandPoolCreateFlagBits bit1 ) + { + return CommandPoolCreateFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE CommandPoolCreateFlags operator~( CommandPoolCreateFlagBits bits ) + { + return ~( CommandPoolCreateFlags( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(CommandPoolCreateFlagBits::eTransient) | VkFlags(CommandPoolCreateFlagBits::eResetCommandBuffer) | VkFlags(CommandPoolCreateFlagBits::eProtected) + }; + }; + + struct CommandPoolCreateInfo + { + CommandPoolCreateInfo( CommandPoolCreateFlags flags_ = CommandPoolCreateFlags(), + uint32_t queueFamilyIndex_ = 0 ) + : flags( flags_ ) + , queueFamilyIndex( queueFamilyIndex_ ) + { + } + + CommandPoolCreateInfo( VkCommandPoolCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( CommandPoolCreateInfo ) ); + } + + CommandPoolCreateInfo& operator=( VkCommandPoolCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( CommandPoolCreateInfo ) ); + return *this; + } + CommandPoolCreateInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + CommandPoolCreateInfo& setFlags( CommandPoolCreateFlags flags_ ) + { + flags = flags_; + return *this; + } + + CommandPoolCreateInfo& setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) + { + queueFamilyIndex = queueFamilyIndex_; + return *this; + } + + operator VkCommandPoolCreateInfo const&() const + { + return *reinterpret_cast(this); + } + + operator VkCommandPoolCreateInfo &() + { + return *reinterpret_cast(this); + } + + bool operator==( CommandPoolCreateInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( queueFamilyIndex == rhs.queueFamilyIndex ); + } + + bool operator!=( 
CommandPoolCreateInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eCommandPoolCreateInfo; + + public: + const void* pNext = nullptr; + CommandPoolCreateFlags flags; + uint32_t queueFamilyIndex; + }; + static_assert( sizeof( CommandPoolCreateInfo ) == sizeof( VkCommandPoolCreateInfo ), "struct and wrapper have different size!" ); + + enum class CommandPoolResetFlagBits + { + eReleaseResources = VK_COMMAND_POOL_RESET_RELEASE_RESOURCES_BIT + }; + + using CommandPoolResetFlags = Flags; + + VULKAN_HPP_INLINE CommandPoolResetFlags operator|( CommandPoolResetFlagBits bit0, CommandPoolResetFlagBits bit1 ) + { + return CommandPoolResetFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE CommandPoolResetFlags operator~( CommandPoolResetFlagBits bits ) + { + return ~( CommandPoolResetFlags( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(CommandPoolResetFlagBits::eReleaseResources) + }; + }; + + enum class CommandBufferResetFlagBits + { + eReleaseResources = VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT + }; + + using CommandBufferResetFlags = Flags; + + VULKAN_HPP_INLINE CommandBufferResetFlags operator|( CommandBufferResetFlagBits bit0, CommandBufferResetFlagBits bit1 ) + { + return CommandBufferResetFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE CommandBufferResetFlags operator~( CommandBufferResetFlagBits bits ) + { + return ~( CommandBufferResetFlags( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(CommandBufferResetFlagBits::eReleaseResources) + }; + }; + + enum class SampleCountFlagBits + { + e1 = VK_SAMPLE_COUNT_1_BIT, + e2 = VK_SAMPLE_COUNT_2_BIT, + e4 = VK_SAMPLE_COUNT_4_BIT, + e8 = VK_SAMPLE_COUNT_8_BIT, + e16 = VK_SAMPLE_COUNT_16_BIT, + e32 = VK_SAMPLE_COUNT_32_BIT, + e64 = VK_SAMPLE_COUNT_64_BIT + }; + + using SampleCountFlags = Flags; + + VULKAN_HPP_INLINE SampleCountFlags operator|( SampleCountFlagBits bit0, SampleCountFlagBits bit1 ) + { + return SampleCountFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE SampleCountFlags operator~( SampleCountFlagBits bits ) + { + return ~( SampleCountFlags( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(SampleCountFlagBits::e1) | VkFlags(SampleCountFlagBits::e2) | VkFlags(SampleCountFlagBits::e4) | VkFlags(SampleCountFlagBits::e8) | VkFlags(SampleCountFlagBits::e16) | VkFlags(SampleCountFlagBits::e32) | VkFlags(SampleCountFlagBits::e64) + }; + }; + + struct ImageFormatProperties + { + operator VkImageFormatProperties const&() const + { + return *reinterpret_cast(this); + } + + operator VkImageFormatProperties &() + { + return *reinterpret_cast(this); + } + + bool operator==( ImageFormatProperties const& rhs ) const + { + return ( maxExtent == rhs.maxExtent ) + && ( maxMipLevels == rhs.maxMipLevels ) + && ( maxArrayLayers == rhs.maxArrayLayers ) + && ( sampleCounts == rhs.sampleCounts ) + && ( maxResourceSize == rhs.maxResourceSize ); + } + + bool operator!=( ImageFormatProperties const& rhs ) const + { + return !operator==( rhs ); + } + + Extent3D maxExtent; + uint32_t maxMipLevels; + uint32_t maxArrayLayers; + SampleCountFlags sampleCounts; + DeviceSize maxResourceSize; + }; + static_assert( sizeof( ImageFormatProperties ) == sizeof( VkImageFormatProperties ), "struct and wrapper have different size!" 
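+  // Illustrative capability query (assumes a vk::PhysicalDevice `physicalDevice`);
+  // the returned sampleCounts mask can be tested with the flag operators above:
+  //
+  //   vk::ImageFormatProperties formatProps =
+  //     physicalDevice.getImageFormatProperties( vk::Format::eR8G8B8A8Unorm,
+  //                                              vk::ImageType::e2D,
+  //                                              vk::ImageTiling::eOptimal,
+  //                                              vk::ImageUsageFlagBits::eColorAttachment,
+  //                                              vk::ImageCreateFlags() );
+  //   bool supports4xMsaa = bool( formatProps.sampleCounts & vk::SampleCountFlagBits::e4 );
+  //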
); + + struct ImageCreateInfo + { + ImageCreateInfo( ImageCreateFlags flags_ = ImageCreateFlags(), + ImageType imageType_ = ImageType::e1D, + Format format_ = Format::eUndefined, + Extent3D extent_ = Extent3D(), + uint32_t mipLevels_ = 0, + uint32_t arrayLayers_ = 0, + SampleCountFlagBits samples_ = SampleCountFlagBits::e1, + ImageTiling tiling_ = ImageTiling::eOptimal, + ImageUsageFlags usage_ = ImageUsageFlags(), + SharingMode sharingMode_ = SharingMode::eExclusive, + uint32_t queueFamilyIndexCount_ = 0, + const uint32_t* pQueueFamilyIndices_ = nullptr, + ImageLayout initialLayout_ = ImageLayout::eUndefined ) + : flags( flags_ ) + , imageType( imageType_ ) + , format( format_ ) + , extent( extent_ ) + , mipLevels( mipLevels_ ) + , arrayLayers( arrayLayers_ ) + , samples( samples_ ) + , tiling( tiling_ ) + , usage( usage_ ) + , sharingMode( sharingMode_ ) + , queueFamilyIndexCount( queueFamilyIndexCount_ ) + , pQueueFamilyIndices( pQueueFamilyIndices_ ) + , initialLayout( initialLayout_ ) + { + } + + ImageCreateInfo( VkImageCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( ImageCreateInfo ) ); + } + + ImageCreateInfo& operator=( VkImageCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( ImageCreateInfo ) ); + return *this; + } + ImageCreateInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + ImageCreateInfo& setFlags( ImageCreateFlags flags_ ) + { + flags = flags_; + return *this; + } + + ImageCreateInfo& setImageType( ImageType imageType_ ) + { + imageType = imageType_; + return *this; + } + + ImageCreateInfo& setFormat( Format format_ ) + { + format = format_; + return *this; + } + + ImageCreateInfo& setExtent( Extent3D extent_ ) + { + extent = extent_; + return *this; + } + + ImageCreateInfo& setMipLevels( uint32_t mipLevels_ ) + { + mipLevels = mipLevels_; + return *this; + } + + ImageCreateInfo& setArrayLayers( uint32_t arrayLayers_ ) + { + arrayLayers = arrayLayers_; + return *this; + } + + ImageCreateInfo& setSamples( SampleCountFlagBits samples_ ) + { + samples = samples_; + return *this; + } + + ImageCreateInfo& setTiling( ImageTiling tiling_ ) + { + tiling = tiling_; + return *this; + } + + ImageCreateInfo& setUsage( ImageUsageFlags usage_ ) + { + usage = usage_; + return *this; + } + + ImageCreateInfo& setSharingMode( SharingMode sharingMode_ ) + { + sharingMode = sharingMode_; + return *this; + } + + ImageCreateInfo& setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) + { + queueFamilyIndexCount = queueFamilyIndexCount_; + return *this; + } + + ImageCreateInfo& setPQueueFamilyIndices( const uint32_t* pQueueFamilyIndices_ ) + { + pQueueFamilyIndices = pQueueFamilyIndices_; + return *this; + } + + ImageCreateInfo& setInitialLayout( ImageLayout initialLayout_ ) + { + initialLayout = initialLayout_; + return *this; + } + + operator VkImageCreateInfo const&() const + { + return *reinterpret_cast(this); + } + + operator VkImageCreateInfo &() + { + return *reinterpret_cast(this); + } + + bool operator==( ImageCreateInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( imageType == rhs.imageType ) + && ( format == rhs.format ) + && ( extent == rhs.extent ) + && ( mipLevels == rhs.mipLevels ) + && ( arrayLayers == rhs.arrayLayers ) + && ( samples == rhs.samples ) + && ( tiling == rhs.tiling ) + && ( usage == rhs.usage ) + && ( sharingMode == rhs.sharingMode ) + && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount ) + && ( pQueueFamilyIndices == 
rhs.pQueueFamilyIndices ) + && ( initialLayout == rhs.initialLayout ); + } + + bool operator!=( ImageCreateInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eImageCreateInfo; + + public: + const void* pNext = nullptr; + ImageCreateFlags flags; + ImageType imageType; + Format format; + Extent3D extent; + uint32_t mipLevels; + uint32_t arrayLayers; + SampleCountFlagBits samples; + ImageTiling tiling; + ImageUsageFlags usage; + SharingMode sharingMode; + uint32_t queueFamilyIndexCount; + const uint32_t* pQueueFamilyIndices; + ImageLayout initialLayout; + }; + static_assert( sizeof( ImageCreateInfo ) == sizeof( VkImageCreateInfo ), "struct and wrapper have different size!" ); + + struct PipelineMultisampleStateCreateInfo + { + PipelineMultisampleStateCreateInfo( PipelineMultisampleStateCreateFlags flags_ = PipelineMultisampleStateCreateFlags(), + SampleCountFlagBits rasterizationSamples_ = SampleCountFlagBits::e1, + Bool32 sampleShadingEnable_ = 0, + float minSampleShading_ = 0, + const SampleMask* pSampleMask_ = nullptr, + Bool32 alphaToCoverageEnable_ = 0, + Bool32 alphaToOneEnable_ = 0 ) + : flags( flags_ ) + , rasterizationSamples( rasterizationSamples_ ) + , sampleShadingEnable( sampleShadingEnable_ ) + , minSampleShading( minSampleShading_ ) + , pSampleMask( pSampleMask_ ) + , alphaToCoverageEnable( alphaToCoverageEnable_ ) + , alphaToOneEnable( alphaToOneEnable_ ) + { + } + + PipelineMultisampleStateCreateInfo( VkPipelineMultisampleStateCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineMultisampleStateCreateInfo ) ); + } + + PipelineMultisampleStateCreateInfo& operator=( VkPipelineMultisampleStateCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineMultisampleStateCreateInfo ) ); + return *this; + } + PipelineMultisampleStateCreateInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PipelineMultisampleStateCreateInfo& setFlags( PipelineMultisampleStateCreateFlags flags_ ) + { + flags = flags_; + return *this; + } + + PipelineMultisampleStateCreateInfo& setRasterizationSamples( SampleCountFlagBits rasterizationSamples_ ) + { + rasterizationSamples = rasterizationSamples_; + return *this; + } + + PipelineMultisampleStateCreateInfo& setSampleShadingEnable( Bool32 sampleShadingEnable_ ) + { + sampleShadingEnable = sampleShadingEnable_; + return *this; + } + + PipelineMultisampleStateCreateInfo& setMinSampleShading( float minSampleShading_ ) + { + minSampleShading = minSampleShading_; + return *this; + } + + PipelineMultisampleStateCreateInfo& setPSampleMask( const SampleMask* pSampleMask_ ) + { + pSampleMask = pSampleMask_; + return *this; + } + + PipelineMultisampleStateCreateInfo& setAlphaToCoverageEnable( Bool32 alphaToCoverageEnable_ ) + { + alphaToCoverageEnable = alphaToCoverageEnable_; + return *this; + } + + PipelineMultisampleStateCreateInfo& setAlphaToOneEnable( Bool32 alphaToOneEnable_ ) + { + alphaToOneEnable = alphaToOneEnable_; + return *this; + } + + operator VkPipelineMultisampleStateCreateInfo const&() const + { + return *reinterpret_cast(this); + } + + operator VkPipelineMultisampleStateCreateInfo &() + { + return *reinterpret_cast(this); + } + + bool operator==( PipelineMultisampleStateCreateInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( rasterizationSamples == rhs.rasterizationSamples ) + && ( sampleShadingEnable == rhs.sampleShadingEnable ) + && ( minSampleShading == 
rhs.minSampleShading ) + && ( pSampleMask == rhs.pSampleMask ) + && ( alphaToCoverageEnable == rhs.alphaToCoverageEnable ) + && ( alphaToOneEnable == rhs.alphaToOneEnable ); + } + + bool operator!=( PipelineMultisampleStateCreateInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePipelineMultisampleStateCreateInfo; + + public: + const void* pNext = nullptr; + PipelineMultisampleStateCreateFlags flags; + SampleCountFlagBits rasterizationSamples; + Bool32 sampleShadingEnable; + float minSampleShading; + const SampleMask* pSampleMask; + Bool32 alphaToCoverageEnable; + Bool32 alphaToOneEnable; + }; + static_assert( sizeof( PipelineMultisampleStateCreateInfo ) == sizeof( VkPipelineMultisampleStateCreateInfo ), "struct and wrapper have different size!" ); + + struct GraphicsPipelineCreateInfo + { + GraphicsPipelineCreateInfo( PipelineCreateFlags flags_ = PipelineCreateFlags(), + uint32_t stageCount_ = 0, + const PipelineShaderStageCreateInfo* pStages_ = nullptr, + const PipelineVertexInputStateCreateInfo* pVertexInputState_ = nullptr, + const PipelineInputAssemblyStateCreateInfo* pInputAssemblyState_ = nullptr, + const PipelineTessellationStateCreateInfo* pTessellationState_ = nullptr, + const PipelineViewportStateCreateInfo* pViewportState_ = nullptr, + const PipelineRasterizationStateCreateInfo* pRasterizationState_ = nullptr, + const PipelineMultisampleStateCreateInfo* pMultisampleState_ = nullptr, + const PipelineDepthStencilStateCreateInfo* pDepthStencilState_ = nullptr, + const PipelineColorBlendStateCreateInfo* pColorBlendState_ = nullptr, + const PipelineDynamicStateCreateInfo* pDynamicState_ = nullptr, + PipelineLayout layout_ = PipelineLayout(), + RenderPass renderPass_ = RenderPass(), + uint32_t subpass_ = 0, + Pipeline basePipelineHandle_ = Pipeline(), + int32_t basePipelineIndex_ = 0 ) + : flags( flags_ ) + , stageCount( stageCount_ ) + , pStages( pStages_ ) + , pVertexInputState( pVertexInputState_ ) + , pInputAssemblyState( pInputAssemblyState_ ) + , pTessellationState( pTessellationState_ ) + , pViewportState( pViewportState_ ) + , pRasterizationState( pRasterizationState_ ) + , pMultisampleState( pMultisampleState_ ) + , pDepthStencilState( pDepthStencilState_ ) + , pColorBlendState( pColorBlendState_ ) + , pDynamicState( pDynamicState_ ) + , layout( layout_ ) + , renderPass( renderPass_ ) + , subpass( subpass_ ) + , basePipelineHandle( basePipelineHandle_ ) + , basePipelineIndex( basePipelineIndex_ ) + { + } + + GraphicsPipelineCreateInfo( VkGraphicsPipelineCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( GraphicsPipelineCreateInfo ) ); + } + + GraphicsPipelineCreateInfo& operator=( VkGraphicsPipelineCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( GraphicsPipelineCreateInfo ) ); + return *this; + } + GraphicsPipelineCreateInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + GraphicsPipelineCreateInfo& setFlags( PipelineCreateFlags flags_ ) + { + flags = flags_; + return *this; + } + + GraphicsPipelineCreateInfo& setStageCount( uint32_t stageCount_ ) + { + stageCount = stageCount_; + return *this; + } + + GraphicsPipelineCreateInfo& setPStages( const PipelineShaderStageCreateInfo* pStages_ ) + { + pStages = pStages_; + return *this; + } + + GraphicsPipelineCreateInfo& setPVertexInputState( const PipelineVertexInputStateCreateInfo* pVertexInputState_ ) + { + pVertexInputState = pVertexInputState_; + return *this; + } + + GraphicsPipelineCreateInfo& 
setPInputAssemblyState( const PipelineInputAssemblyStateCreateInfo* pInputAssemblyState_ ) + { + pInputAssemblyState = pInputAssemblyState_; + return *this; + } + + GraphicsPipelineCreateInfo& setPTessellationState( const PipelineTessellationStateCreateInfo* pTessellationState_ ) + { + pTessellationState = pTessellationState_; + return *this; + } + + GraphicsPipelineCreateInfo& setPViewportState( const PipelineViewportStateCreateInfo* pViewportState_ ) + { + pViewportState = pViewportState_; + return *this; + } + + GraphicsPipelineCreateInfo& setPRasterizationState( const PipelineRasterizationStateCreateInfo* pRasterizationState_ ) + { + pRasterizationState = pRasterizationState_; + return *this; + } + + GraphicsPipelineCreateInfo& setPMultisampleState( const PipelineMultisampleStateCreateInfo* pMultisampleState_ ) + { + pMultisampleState = pMultisampleState_; + return *this; + } + + GraphicsPipelineCreateInfo& setPDepthStencilState( const PipelineDepthStencilStateCreateInfo* pDepthStencilState_ ) + { + pDepthStencilState = pDepthStencilState_; + return *this; + } + + GraphicsPipelineCreateInfo& setPColorBlendState( const PipelineColorBlendStateCreateInfo* pColorBlendState_ ) + { + pColorBlendState = pColorBlendState_; + return *this; + } + + GraphicsPipelineCreateInfo& setPDynamicState( const PipelineDynamicStateCreateInfo* pDynamicState_ ) + { + pDynamicState = pDynamicState_; + return *this; + } + + GraphicsPipelineCreateInfo& setLayout( PipelineLayout layout_ ) + { + layout = layout_; + return *this; + } + + GraphicsPipelineCreateInfo& setRenderPass( RenderPass renderPass_ ) + { + renderPass = renderPass_; + return *this; + } + + GraphicsPipelineCreateInfo& setSubpass( uint32_t subpass_ ) + { + subpass = subpass_; + return *this; + } + + GraphicsPipelineCreateInfo& setBasePipelineHandle( Pipeline basePipelineHandle_ ) + { + basePipelineHandle = basePipelineHandle_; + return *this; + } + + GraphicsPipelineCreateInfo& setBasePipelineIndex( int32_t basePipelineIndex_ ) + { + basePipelineIndex = basePipelineIndex_; + return *this; + } + + operator VkGraphicsPipelineCreateInfo const&() const + { + return *reinterpret_cast(this); + } + + operator VkGraphicsPipelineCreateInfo &() + { + return *reinterpret_cast(this); + } + + bool operator==( GraphicsPipelineCreateInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( stageCount == rhs.stageCount ) + && ( pStages == rhs.pStages ) + && ( pVertexInputState == rhs.pVertexInputState ) + && ( pInputAssemblyState == rhs.pInputAssemblyState ) + && ( pTessellationState == rhs.pTessellationState ) + && ( pViewportState == rhs.pViewportState ) + && ( pRasterizationState == rhs.pRasterizationState ) + && ( pMultisampleState == rhs.pMultisampleState ) + && ( pDepthStencilState == rhs.pDepthStencilState ) + && ( pColorBlendState == rhs.pColorBlendState ) + && ( pDynamicState == rhs.pDynamicState ) + && ( layout == rhs.layout ) + && ( renderPass == rhs.renderPass ) + && ( subpass == rhs.subpass ) + && ( basePipelineHandle == rhs.basePipelineHandle ) + && ( basePipelineIndex == rhs.basePipelineIndex ); + } + + bool operator!=( GraphicsPipelineCreateInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eGraphicsPipelineCreateInfo; + + public: + const void* pNext = nullptr; + PipelineCreateFlags flags; + uint32_t stageCount; + const PipelineShaderStageCreateInfo* pStages; + const PipelineVertexInputStateCreateInfo* pVertexInputState; + 
const PipelineInputAssemblyStateCreateInfo* pInputAssemblyState; + const PipelineTessellationStateCreateInfo* pTessellationState; + const PipelineViewportStateCreateInfo* pViewportState; + const PipelineRasterizationStateCreateInfo* pRasterizationState; + const PipelineMultisampleStateCreateInfo* pMultisampleState; + const PipelineDepthStencilStateCreateInfo* pDepthStencilState; + const PipelineColorBlendStateCreateInfo* pColorBlendState; + const PipelineDynamicStateCreateInfo* pDynamicState; + PipelineLayout layout; + RenderPass renderPass; + uint32_t subpass; + Pipeline basePipelineHandle; + int32_t basePipelineIndex; + }; + static_assert( sizeof( GraphicsPipelineCreateInfo ) == sizeof( VkGraphicsPipelineCreateInfo ), "struct and wrapper have different size!" ); + + struct PhysicalDeviceLimits + { + operator VkPhysicalDeviceLimits const&() const + { + return *reinterpret_cast(this); + } + + operator VkPhysicalDeviceLimits &() + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDeviceLimits const& rhs ) const + { + return ( maxImageDimension1D == rhs.maxImageDimension1D ) + && ( maxImageDimension2D == rhs.maxImageDimension2D ) + && ( maxImageDimension3D == rhs.maxImageDimension3D ) + && ( maxImageDimensionCube == rhs.maxImageDimensionCube ) + && ( maxImageArrayLayers == rhs.maxImageArrayLayers ) + && ( maxTexelBufferElements == rhs.maxTexelBufferElements ) + && ( maxUniformBufferRange == rhs.maxUniformBufferRange ) + && ( maxStorageBufferRange == rhs.maxStorageBufferRange ) + && ( maxPushConstantsSize == rhs.maxPushConstantsSize ) + && ( maxMemoryAllocationCount == rhs.maxMemoryAllocationCount ) + && ( maxSamplerAllocationCount == rhs.maxSamplerAllocationCount ) + && ( bufferImageGranularity == rhs.bufferImageGranularity ) + && ( sparseAddressSpaceSize == rhs.sparseAddressSpaceSize ) + && ( maxBoundDescriptorSets == rhs.maxBoundDescriptorSets ) + && ( maxPerStageDescriptorSamplers == rhs.maxPerStageDescriptorSamplers ) + && ( maxPerStageDescriptorUniformBuffers == rhs.maxPerStageDescriptorUniformBuffers ) + && ( maxPerStageDescriptorStorageBuffers == rhs.maxPerStageDescriptorStorageBuffers ) + && ( maxPerStageDescriptorSampledImages == rhs.maxPerStageDescriptorSampledImages ) + && ( maxPerStageDescriptorStorageImages == rhs.maxPerStageDescriptorStorageImages ) + && ( maxPerStageDescriptorInputAttachments == rhs.maxPerStageDescriptorInputAttachments ) + && ( maxPerStageResources == rhs.maxPerStageResources ) + && ( maxDescriptorSetSamplers == rhs.maxDescriptorSetSamplers ) + && ( maxDescriptorSetUniformBuffers == rhs.maxDescriptorSetUniformBuffers ) + && ( maxDescriptorSetUniformBuffersDynamic == rhs.maxDescriptorSetUniformBuffersDynamic ) + && ( maxDescriptorSetStorageBuffers == rhs.maxDescriptorSetStorageBuffers ) + && ( maxDescriptorSetStorageBuffersDynamic == rhs.maxDescriptorSetStorageBuffersDynamic ) + && ( maxDescriptorSetSampledImages == rhs.maxDescriptorSetSampledImages ) + && ( maxDescriptorSetStorageImages == rhs.maxDescriptorSetStorageImages ) + && ( maxDescriptorSetInputAttachments == rhs.maxDescriptorSetInputAttachments ) + && ( maxVertexInputAttributes == rhs.maxVertexInputAttributes ) + && ( maxVertexInputBindings == rhs.maxVertexInputBindings ) + && ( maxVertexInputAttributeOffset == rhs.maxVertexInputAttributeOffset ) + && ( maxVertexInputBindingStride == rhs.maxVertexInputBindingStride ) + && ( maxVertexOutputComponents == rhs.maxVertexOutputComponents ) + && ( maxTessellationGenerationLevel == rhs.maxTessellationGenerationLevel ) + && ( 
maxTessellationPatchSize == rhs.maxTessellationPatchSize ) + && ( maxTessellationControlPerVertexInputComponents == rhs.maxTessellationControlPerVertexInputComponents ) + && ( maxTessellationControlPerVertexOutputComponents == rhs.maxTessellationControlPerVertexOutputComponents ) + && ( maxTessellationControlPerPatchOutputComponents == rhs.maxTessellationControlPerPatchOutputComponents ) + && ( maxTessellationControlTotalOutputComponents == rhs.maxTessellationControlTotalOutputComponents ) + && ( maxTessellationEvaluationInputComponents == rhs.maxTessellationEvaluationInputComponents ) + && ( maxTessellationEvaluationOutputComponents == rhs.maxTessellationEvaluationOutputComponents ) + && ( maxGeometryShaderInvocations == rhs.maxGeometryShaderInvocations ) + && ( maxGeometryInputComponents == rhs.maxGeometryInputComponents ) + && ( maxGeometryOutputComponents == rhs.maxGeometryOutputComponents ) + && ( maxGeometryOutputVertices == rhs.maxGeometryOutputVertices ) + && ( maxGeometryTotalOutputComponents == rhs.maxGeometryTotalOutputComponents ) + && ( maxFragmentInputComponents == rhs.maxFragmentInputComponents ) + && ( maxFragmentOutputAttachments == rhs.maxFragmentOutputAttachments ) + && ( maxFragmentDualSrcAttachments == rhs.maxFragmentDualSrcAttachments ) + && ( maxFragmentCombinedOutputResources == rhs.maxFragmentCombinedOutputResources ) + && ( maxComputeSharedMemorySize == rhs.maxComputeSharedMemorySize ) + && ( memcmp( maxComputeWorkGroupCount, rhs.maxComputeWorkGroupCount, 3 * sizeof( uint32_t ) ) == 0 ) + && ( maxComputeWorkGroupInvocations == rhs.maxComputeWorkGroupInvocations ) + && ( memcmp( maxComputeWorkGroupSize, rhs.maxComputeWorkGroupSize, 3 * sizeof( uint32_t ) ) == 0 ) + && ( subPixelPrecisionBits == rhs.subPixelPrecisionBits ) + && ( subTexelPrecisionBits == rhs.subTexelPrecisionBits ) + && ( mipmapPrecisionBits == rhs.mipmapPrecisionBits ) + && ( maxDrawIndexedIndexValue == rhs.maxDrawIndexedIndexValue ) + && ( maxDrawIndirectCount == rhs.maxDrawIndirectCount ) + && ( maxSamplerLodBias == rhs.maxSamplerLodBias ) + && ( maxSamplerAnisotropy == rhs.maxSamplerAnisotropy ) + && ( maxViewports == rhs.maxViewports ) + && ( memcmp( maxViewportDimensions, rhs.maxViewportDimensions, 2 * sizeof( uint32_t ) ) == 0 ) + && ( memcmp( viewportBoundsRange, rhs.viewportBoundsRange, 2 * sizeof( float ) ) == 0 ) + && ( viewportSubPixelBits == rhs.viewportSubPixelBits ) + && ( minMemoryMapAlignment == rhs.minMemoryMapAlignment ) + && ( minTexelBufferOffsetAlignment == rhs.minTexelBufferOffsetAlignment ) + && ( minUniformBufferOffsetAlignment == rhs.minUniformBufferOffsetAlignment ) + && ( minStorageBufferOffsetAlignment == rhs.minStorageBufferOffsetAlignment ) + && ( minTexelOffset == rhs.minTexelOffset ) + && ( maxTexelOffset == rhs.maxTexelOffset ) + && ( minTexelGatherOffset == rhs.minTexelGatherOffset ) + && ( maxTexelGatherOffset == rhs.maxTexelGatherOffset ) + && ( minInterpolationOffset == rhs.minInterpolationOffset ) + && ( maxInterpolationOffset == rhs.maxInterpolationOffset ) + && ( subPixelInterpolationOffsetBits == rhs.subPixelInterpolationOffsetBits ) + && ( maxFramebufferWidth == rhs.maxFramebufferWidth ) + && ( maxFramebufferHeight == rhs.maxFramebufferHeight ) + && ( maxFramebufferLayers == rhs.maxFramebufferLayers ) + && ( framebufferColorSampleCounts == rhs.framebufferColorSampleCounts ) + && ( framebufferDepthSampleCounts == rhs.framebufferDepthSampleCounts ) + && ( framebufferStencilSampleCounts == rhs.framebufferStencilSampleCounts ) + && ( 
framebufferNoAttachmentsSampleCounts == rhs.framebufferNoAttachmentsSampleCounts ) + && ( maxColorAttachments == rhs.maxColorAttachments ) + && ( sampledImageColorSampleCounts == rhs.sampledImageColorSampleCounts ) + && ( sampledImageIntegerSampleCounts == rhs.sampledImageIntegerSampleCounts ) + && ( sampledImageDepthSampleCounts == rhs.sampledImageDepthSampleCounts ) + && ( sampledImageStencilSampleCounts == rhs.sampledImageStencilSampleCounts ) + && ( storageImageSampleCounts == rhs.storageImageSampleCounts ) + && ( maxSampleMaskWords == rhs.maxSampleMaskWords ) + && ( timestampComputeAndGraphics == rhs.timestampComputeAndGraphics ) + && ( timestampPeriod == rhs.timestampPeriod ) + && ( maxClipDistances == rhs.maxClipDistances ) + && ( maxCullDistances == rhs.maxCullDistances ) + && ( maxCombinedClipAndCullDistances == rhs.maxCombinedClipAndCullDistances ) + && ( discreteQueuePriorities == rhs.discreteQueuePriorities ) + && ( memcmp( pointSizeRange, rhs.pointSizeRange, 2 * sizeof( float ) ) == 0 ) + && ( memcmp( lineWidthRange, rhs.lineWidthRange, 2 * sizeof( float ) ) == 0 ) + && ( pointSizeGranularity == rhs.pointSizeGranularity ) + && ( lineWidthGranularity == rhs.lineWidthGranularity ) + && ( strictLines == rhs.strictLines ) + && ( standardSampleLocations == rhs.standardSampleLocations ) + && ( optimalBufferCopyOffsetAlignment == rhs.optimalBufferCopyOffsetAlignment ) + && ( optimalBufferCopyRowPitchAlignment == rhs.optimalBufferCopyRowPitchAlignment ) + && ( nonCoherentAtomSize == rhs.nonCoherentAtomSize ); + } + + bool operator!=( PhysicalDeviceLimits const& rhs ) const + { + return !operator==( rhs ); + } + + uint32_t maxImageDimension1D; + uint32_t maxImageDimension2D; + uint32_t maxImageDimension3D; + uint32_t maxImageDimensionCube; + uint32_t maxImageArrayLayers; + uint32_t maxTexelBufferElements; + uint32_t maxUniformBufferRange; + uint32_t maxStorageBufferRange; + uint32_t maxPushConstantsSize; + uint32_t maxMemoryAllocationCount; + uint32_t maxSamplerAllocationCount; + DeviceSize bufferImageGranularity; + DeviceSize sparseAddressSpaceSize; + uint32_t maxBoundDescriptorSets; + uint32_t maxPerStageDescriptorSamplers; + uint32_t maxPerStageDescriptorUniformBuffers; + uint32_t maxPerStageDescriptorStorageBuffers; + uint32_t maxPerStageDescriptorSampledImages; + uint32_t maxPerStageDescriptorStorageImages; + uint32_t maxPerStageDescriptorInputAttachments; + uint32_t maxPerStageResources; + uint32_t maxDescriptorSetSamplers; + uint32_t maxDescriptorSetUniformBuffers; + uint32_t maxDescriptorSetUniformBuffersDynamic; + uint32_t maxDescriptorSetStorageBuffers; + uint32_t maxDescriptorSetStorageBuffersDynamic; + uint32_t maxDescriptorSetSampledImages; + uint32_t maxDescriptorSetStorageImages; + uint32_t maxDescriptorSetInputAttachments; + uint32_t maxVertexInputAttributes; + uint32_t maxVertexInputBindings; + uint32_t maxVertexInputAttributeOffset; + uint32_t maxVertexInputBindingStride; + uint32_t maxVertexOutputComponents; + uint32_t maxTessellationGenerationLevel; + uint32_t maxTessellationPatchSize; + uint32_t maxTessellationControlPerVertexInputComponents; + uint32_t maxTessellationControlPerVertexOutputComponents; + uint32_t maxTessellationControlPerPatchOutputComponents; + uint32_t maxTessellationControlTotalOutputComponents; + uint32_t maxTessellationEvaluationInputComponents; + uint32_t maxTessellationEvaluationOutputComponents; + uint32_t maxGeometryShaderInvocations; + uint32_t maxGeometryInputComponents; + uint32_t maxGeometryOutputComponents; + uint32_t 
maxGeometryOutputVertices; + uint32_t maxGeometryTotalOutputComponents; + uint32_t maxFragmentInputComponents; + uint32_t maxFragmentOutputAttachments; + uint32_t maxFragmentDualSrcAttachments; + uint32_t maxFragmentCombinedOutputResources; + uint32_t maxComputeSharedMemorySize; + uint32_t maxComputeWorkGroupCount[3]; + uint32_t maxComputeWorkGroupInvocations; + uint32_t maxComputeWorkGroupSize[3]; + uint32_t subPixelPrecisionBits; + uint32_t subTexelPrecisionBits; + uint32_t mipmapPrecisionBits; + uint32_t maxDrawIndexedIndexValue; + uint32_t maxDrawIndirectCount; + float maxSamplerLodBias; + float maxSamplerAnisotropy; + uint32_t maxViewports; + uint32_t maxViewportDimensions[2]; + float viewportBoundsRange[2]; + uint32_t viewportSubPixelBits; + size_t minMemoryMapAlignment; + DeviceSize minTexelBufferOffsetAlignment; + DeviceSize minUniformBufferOffsetAlignment; + DeviceSize minStorageBufferOffsetAlignment; + int32_t minTexelOffset; + uint32_t maxTexelOffset; + int32_t minTexelGatherOffset; + uint32_t maxTexelGatherOffset; + float minInterpolationOffset; + float maxInterpolationOffset; + uint32_t subPixelInterpolationOffsetBits; + uint32_t maxFramebufferWidth; + uint32_t maxFramebufferHeight; + uint32_t maxFramebufferLayers; + SampleCountFlags framebufferColorSampleCounts; + SampleCountFlags framebufferDepthSampleCounts; + SampleCountFlags framebufferStencilSampleCounts; + SampleCountFlags framebufferNoAttachmentsSampleCounts; + uint32_t maxColorAttachments; + SampleCountFlags sampledImageColorSampleCounts; + SampleCountFlags sampledImageIntegerSampleCounts; + SampleCountFlags sampledImageDepthSampleCounts; + SampleCountFlags sampledImageStencilSampleCounts; + SampleCountFlags storageImageSampleCounts; + uint32_t maxSampleMaskWords; + Bool32 timestampComputeAndGraphics; + float timestampPeriod; + uint32_t maxClipDistances; + uint32_t maxCullDistances; + uint32_t maxCombinedClipAndCullDistances; + uint32_t discreteQueuePriorities; + float pointSizeRange[2]; + float lineWidthRange[2]; + float pointSizeGranularity; + float lineWidthGranularity; + Bool32 strictLines; + Bool32 standardSampleLocations; + DeviceSize optimalBufferCopyOffsetAlignment; + DeviceSize optimalBufferCopyRowPitchAlignment; + DeviceSize nonCoherentAtomSize; + }; + static_assert( sizeof( PhysicalDeviceLimits ) == sizeof( VkPhysicalDeviceLimits ), "struct and wrapper have different size!" 
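+  // Illustrative sketch (assumes a vk::PhysicalDevice `physicalDevice`):
+  //
+  //   vk::PhysicalDeviceLimits limits = physicalDevice.getProperties().limits;
+  //   vk::DeviceSize uboAlignment = limits.minUniformBufferOffsetAlignment;
+  //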
); + + struct PhysicalDeviceProperties + { + operator VkPhysicalDeviceProperties const&() const + { + return *reinterpret_cast(this); + } + + operator VkPhysicalDeviceProperties &() + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDeviceProperties const& rhs ) const + { + return ( apiVersion == rhs.apiVersion ) + && ( driverVersion == rhs.driverVersion ) + && ( vendorID == rhs.vendorID ) + && ( deviceID == rhs.deviceID ) + && ( deviceType == rhs.deviceType ) + && ( memcmp( deviceName, rhs.deviceName, VK_MAX_PHYSICAL_DEVICE_NAME_SIZE * sizeof( char ) ) == 0 ) + && ( memcmp( pipelineCacheUUID, rhs.pipelineCacheUUID, VK_UUID_SIZE * sizeof( uint8_t ) ) == 0 ) + && ( limits == rhs.limits ) + && ( sparseProperties == rhs.sparseProperties ); + } + + bool operator!=( PhysicalDeviceProperties const& rhs ) const + { + return !operator==( rhs ); + } + + uint32_t apiVersion; + uint32_t driverVersion; + uint32_t vendorID; + uint32_t deviceID; + PhysicalDeviceType deviceType; + char deviceName[VK_MAX_PHYSICAL_DEVICE_NAME_SIZE]; + uint8_t pipelineCacheUUID[VK_UUID_SIZE]; + PhysicalDeviceLimits limits; + PhysicalDeviceSparseProperties sparseProperties; + }; + static_assert( sizeof( PhysicalDeviceProperties ) == sizeof( VkPhysicalDeviceProperties ), "struct and wrapper have different size!" ); + + struct PhysicalDeviceProperties2 + { + operator VkPhysicalDeviceProperties2 const&() const + { + return *reinterpret_cast(this); + } + + operator VkPhysicalDeviceProperties2 &() + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDeviceProperties2 const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( properties == rhs.properties ); + } + + bool operator!=( PhysicalDeviceProperties2 const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePhysicalDeviceProperties2; + + public: + void* pNext = nullptr; + PhysicalDeviceProperties properties; + }; + static_assert( sizeof( PhysicalDeviceProperties2 ) == sizeof( VkPhysicalDeviceProperties2 ), "struct and wrapper have different size!" ); + + using PhysicalDeviceProperties2KHR = PhysicalDeviceProperties2; + + struct ImageFormatProperties2 + { + operator VkImageFormatProperties2 const&() const + { + return *reinterpret_cast(this); + } + + operator VkImageFormatProperties2 &() + { + return *reinterpret_cast(this); + } + + bool operator==( ImageFormatProperties2 const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( imageFormatProperties == rhs.imageFormatProperties ); + } + + bool operator!=( ImageFormatProperties2 const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eImageFormatProperties2; + + public: + void* pNext = nullptr; + ImageFormatProperties imageFormatProperties; + }; + static_assert( sizeof( ImageFormatProperties2 ) == sizeof( VkImageFormatProperties2 ), "struct and wrapper have different size!" 
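+  // Illustrative sketch: the *2 structures carry a pNext chain so extension
+  // structures can be appended (assumes a vk::PhysicalDevice `physicalDevice`):
+  //
+  //   vk::PhysicalDeviceProperties2 props2 = physicalDevice.getProperties2();
+  //   uint32_t apiVersion = props2.properties.apiVersion;
+  //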
); + + using ImageFormatProperties2KHR = ImageFormatProperties2; + + struct PhysicalDeviceSparseImageFormatInfo2 + { + PhysicalDeviceSparseImageFormatInfo2( Format format_ = Format::eUndefined, + ImageType type_ = ImageType::e1D, + SampleCountFlagBits samples_ = SampleCountFlagBits::e1, + ImageUsageFlags usage_ = ImageUsageFlags(), + ImageTiling tiling_ = ImageTiling::eOptimal ) + : format( format_ ) + , type( type_ ) + , samples( samples_ ) + , usage( usage_ ) + , tiling( tiling_ ) + { + } + + PhysicalDeviceSparseImageFormatInfo2( VkPhysicalDeviceSparseImageFormatInfo2 const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceSparseImageFormatInfo2 ) ); + } + + PhysicalDeviceSparseImageFormatInfo2& operator=( VkPhysicalDeviceSparseImageFormatInfo2 const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceSparseImageFormatInfo2 ) ); + return *this; + } + PhysicalDeviceSparseImageFormatInfo2& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceSparseImageFormatInfo2& setFormat( Format format_ ) + { + format = format_; + return *this; + } + + PhysicalDeviceSparseImageFormatInfo2& setType( ImageType type_ ) + { + type = type_; + return *this; + } + + PhysicalDeviceSparseImageFormatInfo2& setSamples( SampleCountFlagBits samples_ ) + { + samples = samples_; + return *this; + } + + PhysicalDeviceSparseImageFormatInfo2& setUsage( ImageUsageFlags usage_ ) + { + usage = usage_; + return *this; + } + + PhysicalDeviceSparseImageFormatInfo2& setTiling( ImageTiling tiling_ ) + { + tiling = tiling_; + return *this; + } + + operator VkPhysicalDeviceSparseImageFormatInfo2 const&() const + { + return *reinterpret_cast(this); + } + + operator VkPhysicalDeviceSparseImageFormatInfo2 &() + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDeviceSparseImageFormatInfo2 const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( format == rhs.format ) + && ( type == rhs.type ) + && ( samples == rhs.samples ) + && ( usage == rhs.usage ) + && ( tiling == rhs.tiling ); + } + + bool operator!=( PhysicalDeviceSparseImageFormatInfo2 const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePhysicalDeviceSparseImageFormatInfo2; + + public: + const void* pNext = nullptr; + Format format; + ImageType type; + SampleCountFlagBits samples; + ImageUsageFlags usage; + ImageTiling tiling; + }; + static_assert( sizeof( PhysicalDeviceSparseImageFormatInfo2 ) == sizeof( VkPhysicalDeviceSparseImageFormatInfo2 ), "struct and wrapper have different size!" 
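+  // Illustrative sketch pairing this input structure with the *2 query
+  // (assumes a vk::PhysicalDevice `physicalDevice`):
+  //
+  //   vk::PhysicalDeviceSparseImageFormatInfo2 sparseInfo = vk::PhysicalDeviceSparseImageFormatInfo2()
+  //     .setFormat( vk::Format::eR8G8B8A8Unorm )
+  //     .setType( vk::ImageType::e2D )
+  //     .setSamples( vk::SampleCountFlagBits::e1 )
+  //     .setUsage( vk::ImageUsageFlagBits::eSampled )
+  //     .setTiling( vk::ImageTiling::eOptimal );
+  //   std::vector<vk::SparseImageFormatProperties2> sparseProps2 =
+  //     physicalDevice.getSparseImageFormatProperties2( sparseInfo );
+  //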
); + + using PhysicalDeviceSparseImageFormatInfo2KHR = PhysicalDeviceSparseImageFormatInfo2; + + struct SampleLocationsInfoEXT + { + SampleLocationsInfoEXT( SampleCountFlagBits sampleLocationsPerPixel_ = SampleCountFlagBits::e1, + Extent2D sampleLocationGridSize_ = Extent2D(), + uint32_t sampleLocationsCount_ = 0, + const SampleLocationEXT* pSampleLocations_ = nullptr ) + : sampleLocationsPerPixel( sampleLocationsPerPixel_ ) + , sampleLocationGridSize( sampleLocationGridSize_ ) + , sampleLocationsCount( sampleLocationsCount_ ) + , pSampleLocations( pSampleLocations_ ) + { + } + + SampleLocationsInfoEXT( VkSampleLocationsInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( SampleLocationsInfoEXT ) ); + } + + SampleLocationsInfoEXT& operator=( VkSampleLocationsInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( SampleLocationsInfoEXT ) ); + return *this; + } + SampleLocationsInfoEXT& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + SampleLocationsInfoEXT& setSampleLocationsPerPixel( SampleCountFlagBits sampleLocationsPerPixel_ ) + { + sampleLocationsPerPixel = sampleLocationsPerPixel_; + return *this; + } + + SampleLocationsInfoEXT& setSampleLocationGridSize( Extent2D sampleLocationGridSize_ ) + { + sampleLocationGridSize = sampleLocationGridSize_; + return *this; + } + + SampleLocationsInfoEXT& setSampleLocationsCount( uint32_t sampleLocationsCount_ ) + { + sampleLocationsCount = sampleLocationsCount_; + return *this; + } + + SampleLocationsInfoEXT& setPSampleLocations( const SampleLocationEXT* pSampleLocations_ ) + { + pSampleLocations = pSampleLocations_; + return *this; + } + + operator VkSampleLocationsInfoEXT const&() const + { + return *reinterpret_cast(this); + } + + operator VkSampleLocationsInfoEXT &() + { + return *reinterpret_cast(this); + } + + bool operator==( SampleLocationsInfoEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( sampleLocationsPerPixel == rhs.sampleLocationsPerPixel ) + && ( sampleLocationGridSize == rhs.sampleLocationGridSize ) + && ( sampleLocationsCount == rhs.sampleLocationsCount ) + && ( pSampleLocations == rhs.pSampleLocations ); + } + + bool operator!=( SampleLocationsInfoEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eSampleLocationsInfoEXT; + + public: + const void* pNext = nullptr; + SampleCountFlagBits sampleLocationsPerPixel; + Extent2D sampleLocationGridSize; + uint32_t sampleLocationsCount; + const SampleLocationEXT* pSampleLocations; + }; + static_assert( sizeof( SampleLocationsInfoEXT ) == sizeof( VkSampleLocationsInfoEXT ), "struct and wrapper have different size!" 
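For VK_EXT_sample_locations, SampleLocationsInfoEXT carries the custom per-pixel sample positions. A hedged sketch of filling it with the chained setters; the 4-sample, 1x1-grid values are invented for the example and must match what the implementation reports in PhysicalDeviceSampleLocationsPropertiesEXT:

// Editor's illustrative sketch, not part of the generated header.
#include <array>
#include <vulkan/vulkan.hpp>

vk::SampleLocationsInfoEXT makeSampleLocations( const std::array<vk::SampleLocationEXT, 4>& locations )
{
  // The returned struct only points at 'locations'; the caller must keep the array alive.
  return vk::SampleLocationsInfoEXT()
    .setSampleLocationsPerPixel( vk::SampleCountFlagBits::e4 )
    .setSampleLocationGridSize( vk::Extent2D( 1, 1 ) )
    .setSampleLocationsCount( static_cast<uint32_t>( locations.size() ) )
    .setPSampleLocations( locations.data() );
}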
); + + struct AttachmentSampleLocationsEXT + { + AttachmentSampleLocationsEXT( uint32_t attachmentIndex_ = 0, + SampleLocationsInfoEXT sampleLocationsInfo_ = SampleLocationsInfoEXT() ) + : attachmentIndex( attachmentIndex_ ) + , sampleLocationsInfo( sampleLocationsInfo_ ) + { + } + + AttachmentSampleLocationsEXT( VkAttachmentSampleLocationsEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( AttachmentSampleLocationsEXT ) ); + } + + AttachmentSampleLocationsEXT& operator=( VkAttachmentSampleLocationsEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( AttachmentSampleLocationsEXT ) ); + return *this; + } + AttachmentSampleLocationsEXT& setAttachmentIndex( uint32_t attachmentIndex_ ) + { + attachmentIndex = attachmentIndex_; + return *this; + } + + AttachmentSampleLocationsEXT& setSampleLocationsInfo( SampleLocationsInfoEXT sampleLocationsInfo_ ) + { + sampleLocationsInfo = sampleLocationsInfo_; + return *this; + } + + operator VkAttachmentSampleLocationsEXT const&() const + { + return *reinterpret_cast(this); + } + + operator VkAttachmentSampleLocationsEXT &() + { + return *reinterpret_cast(this); + } + + bool operator==( AttachmentSampleLocationsEXT const& rhs ) const + { + return ( attachmentIndex == rhs.attachmentIndex ) + && ( sampleLocationsInfo == rhs.sampleLocationsInfo ); + } + + bool operator!=( AttachmentSampleLocationsEXT const& rhs ) const + { + return !operator==( rhs ); + } + + uint32_t attachmentIndex; + SampleLocationsInfoEXT sampleLocationsInfo; + }; + static_assert( sizeof( AttachmentSampleLocationsEXT ) == sizeof( VkAttachmentSampleLocationsEXT ), "struct and wrapper have different size!" ); + + struct SubpassSampleLocationsEXT + { + SubpassSampleLocationsEXT( uint32_t subpassIndex_ = 0, + SampleLocationsInfoEXT sampleLocationsInfo_ = SampleLocationsInfoEXT() ) + : subpassIndex( subpassIndex_ ) + , sampleLocationsInfo( sampleLocationsInfo_ ) + { + } + + SubpassSampleLocationsEXT( VkSubpassSampleLocationsEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( SubpassSampleLocationsEXT ) ); + } + + SubpassSampleLocationsEXT& operator=( VkSubpassSampleLocationsEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( SubpassSampleLocationsEXT ) ); + return *this; + } + SubpassSampleLocationsEXT& setSubpassIndex( uint32_t subpassIndex_ ) + { + subpassIndex = subpassIndex_; + return *this; + } + + SubpassSampleLocationsEXT& setSampleLocationsInfo( SampleLocationsInfoEXT sampleLocationsInfo_ ) + { + sampleLocationsInfo = sampleLocationsInfo_; + return *this; + } + + operator VkSubpassSampleLocationsEXT const&() const + { + return *reinterpret_cast(this); + } + + operator VkSubpassSampleLocationsEXT &() + { + return *reinterpret_cast(this); + } + + bool operator==( SubpassSampleLocationsEXT const& rhs ) const + { + return ( subpassIndex == rhs.subpassIndex ) + && ( sampleLocationsInfo == rhs.sampleLocationsInfo ); + } + + bool operator!=( SubpassSampleLocationsEXT const& rhs ) const + { + return !operator==( rhs ); + } + + uint32_t subpassIndex; + SampleLocationsInfoEXT sampleLocationsInfo; + }; + static_assert( sizeof( SubpassSampleLocationsEXT ) == sizeof( VkSubpassSampleLocationsEXT ), "struct and wrapper have different size!" 
); + + struct RenderPassSampleLocationsBeginInfoEXT + { + RenderPassSampleLocationsBeginInfoEXT( uint32_t attachmentInitialSampleLocationsCount_ = 0, + const AttachmentSampleLocationsEXT* pAttachmentInitialSampleLocations_ = nullptr, + uint32_t postSubpassSampleLocationsCount_ = 0, + const SubpassSampleLocationsEXT* pPostSubpassSampleLocations_ = nullptr ) + : attachmentInitialSampleLocationsCount( attachmentInitialSampleLocationsCount_ ) + , pAttachmentInitialSampleLocations( pAttachmentInitialSampleLocations_ ) + , postSubpassSampleLocationsCount( postSubpassSampleLocationsCount_ ) + , pPostSubpassSampleLocations( pPostSubpassSampleLocations_ ) + { + } + + RenderPassSampleLocationsBeginInfoEXT( VkRenderPassSampleLocationsBeginInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( RenderPassSampleLocationsBeginInfoEXT ) ); + } + + RenderPassSampleLocationsBeginInfoEXT& operator=( VkRenderPassSampleLocationsBeginInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( RenderPassSampleLocationsBeginInfoEXT ) ); + return *this; + } + RenderPassSampleLocationsBeginInfoEXT& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + RenderPassSampleLocationsBeginInfoEXT& setAttachmentInitialSampleLocationsCount( uint32_t attachmentInitialSampleLocationsCount_ ) + { + attachmentInitialSampleLocationsCount = attachmentInitialSampleLocationsCount_; + return *this; + } + + RenderPassSampleLocationsBeginInfoEXT& setPAttachmentInitialSampleLocations( const AttachmentSampleLocationsEXT* pAttachmentInitialSampleLocations_ ) + { + pAttachmentInitialSampleLocations = pAttachmentInitialSampleLocations_; + return *this; + } + + RenderPassSampleLocationsBeginInfoEXT& setPostSubpassSampleLocationsCount( uint32_t postSubpassSampleLocationsCount_ ) + { + postSubpassSampleLocationsCount = postSubpassSampleLocationsCount_; + return *this; + } + + RenderPassSampleLocationsBeginInfoEXT& setPPostSubpassSampleLocations( const SubpassSampleLocationsEXT* pPostSubpassSampleLocations_ ) + { + pPostSubpassSampleLocations = pPostSubpassSampleLocations_; + return *this; + } + + operator VkRenderPassSampleLocationsBeginInfoEXT const&() const + { + return *reinterpret_cast(this); + } + + operator VkRenderPassSampleLocationsBeginInfoEXT &() + { + return *reinterpret_cast(this); + } + + bool operator==( RenderPassSampleLocationsBeginInfoEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( attachmentInitialSampleLocationsCount == rhs.attachmentInitialSampleLocationsCount ) + && ( pAttachmentInitialSampleLocations == rhs.pAttachmentInitialSampleLocations ) + && ( postSubpassSampleLocationsCount == rhs.postSubpassSampleLocationsCount ) + && ( pPostSubpassSampleLocations == rhs.pPostSubpassSampleLocations ); + } + + bool operator!=( RenderPassSampleLocationsBeginInfoEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eRenderPassSampleLocationsBeginInfoEXT; + + public: + const void* pNext = nullptr; + uint32_t attachmentInitialSampleLocationsCount; + const AttachmentSampleLocationsEXT* pAttachmentInitialSampleLocations; + uint32_t postSubpassSampleLocationsCount; + const SubpassSampleLocationsEXT* pPostSubpassSampleLocations; + }; + static_assert( sizeof( RenderPassSampleLocationsBeginInfoEXT ) == sizeof( VkRenderPassSampleLocationsBeginInfoEXT ), "struct and wrapper have different size!" 
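RenderPassSampleLocationsBeginInfoEXT is consumed through the pNext chain of VkRenderPassBeginInfo. A sketch under the assumption that the base begin info and the sample locations are prepared elsewhere; the single post-subpass entry for subpass 0 is only an example:

// Editor's illustrative sketch, not part of the generated header.
#include <vulkan/vulkan.hpp>

void beginWithSampleLocations( vk::CommandBuffer cmd,
                               const vk::RenderPassBeginInfo& base,
                               const vk::SampleLocationsInfoEXT& locations )
{
  vk::SubpassSampleLocationsEXT subpassLocations( 0, locations );

  vk::RenderPassSampleLocationsBeginInfoEXT sampleLocationsBegin;
  sampleLocationsBegin.setPostSubpassSampleLocationsCount( 1 )
                      .setPPostSubpassSampleLocations( &subpassLocations );

  vk::RenderPassBeginInfo beginInfo = base;    // copy so the caller's struct stays untouched
  beginInfo.setPNext( &sampleLocationsBegin );
  cmd.beginRenderPass( beginInfo, vk::SubpassContents::eInline );
}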
); + + struct PipelineSampleLocationsStateCreateInfoEXT + { + PipelineSampleLocationsStateCreateInfoEXT( Bool32 sampleLocationsEnable_ = 0, + SampleLocationsInfoEXT sampleLocationsInfo_ = SampleLocationsInfoEXT() ) + : sampleLocationsEnable( sampleLocationsEnable_ ) + , sampleLocationsInfo( sampleLocationsInfo_ ) + { + } + + PipelineSampleLocationsStateCreateInfoEXT( VkPipelineSampleLocationsStateCreateInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineSampleLocationsStateCreateInfoEXT ) ); + } + + PipelineSampleLocationsStateCreateInfoEXT& operator=( VkPipelineSampleLocationsStateCreateInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineSampleLocationsStateCreateInfoEXT ) ); + return *this; + } + PipelineSampleLocationsStateCreateInfoEXT& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PipelineSampleLocationsStateCreateInfoEXT& setSampleLocationsEnable( Bool32 sampleLocationsEnable_ ) + { + sampleLocationsEnable = sampleLocationsEnable_; + return *this; + } + + PipelineSampleLocationsStateCreateInfoEXT& setSampleLocationsInfo( SampleLocationsInfoEXT sampleLocationsInfo_ ) + { + sampleLocationsInfo = sampleLocationsInfo_; + return *this; + } + + operator VkPipelineSampleLocationsStateCreateInfoEXT const&() const + { + return *reinterpret_cast(this); + } + + operator VkPipelineSampleLocationsStateCreateInfoEXT &() + { + return *reinterpret_cast(this); + } + + bool operator==( PipelineSampleLocationsStateCreateInfoEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( sampleLocationsEnable == rhs.sampleLocationsEnable ) + && ( sampleLocationsInfo == rhs.sampleLocationsInfo ); + } + + bool operator!=( PipelineSampleLocationsStateCreateInfoEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePipelineSampleLocationsStateCreateInfoEXT; + + public: + const void* pNext = nullptr; + Bool32 sampleLocationsEnable; + SampleLocationsInfoEXT sampleLocationsInfo; + }; + static_assert( sizeof( PipelineSampleLocationsStateCreateInfoEXT ) == sizeof( VkPipelineSampleLocationsStateCreateInfoEXT ), "struct and wrapper have different size!" 
); + + struct PhysicalDeviceSampleLocationsPropertiesEXT + { + operator VkPhysicalDeviceSampleLocationsPropertiesEXT const&() const + { + return *reinterpret_cast(this); + } + + operator VkPhysicalDeviceSampleLocationsPropertiesEXT &() + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDeviceSampleLocationsPropertiesEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( sampleLocationSampleCounts == rhs.sampleLocationSampleCounts ) + && ( maxSampleLocationGridSize == rhs.maxSampleLocationGridSize ) + && ( memcmp( sampleLocationCoordinateRange, rhs.sampleLocationCoordinateRange, 2 * sizeof( float ) ) == 0 ) + && ( sampleLocationSubPixelBits == rhs.sampleLocationSubPixelBits ) + && ( variableSampleLocations == rhs.variableSampleLocations ); + } + + bool operator!=( PhysicalDeviceSampleLocationsPropertiesEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePhysicalDeviceSampleLocationsPropertiesEXT; + + public: + void* pNext = nullptr; + SampleCountFlags sampleLocationSampleCounts; + Extent2D maxSampleLocationGridSize; + float sampleLocationCoordinateRange[2]; + uint32_t sampleLocationSubPixelBits; + Bool32 variableSampleLocations; + }; + static_assert( sizeof( PhysicalDeviceSampleLocationsPropertiesEXT ) == sizeof( VkPhysicalDeviceSampleLocationsPropertiesEXT ), "struct and wrapper have different size!" ); + + enum class AttachmentDescriptionFlagBits + { + eMayAlias = VK_ATTACHMENT_DESCRIPTION_MAY_ALIAS_BIT + }; + + using AttachmentDescriptionFlags = Flags; + + VULKAN_HPP_INLINE AttachmentDescriptionFlags operator|( AttachmentDescriptionFlagBits bit0, AttachmentDescriptionFlagBits bit1 ) + { + return AttachmentDescriptionFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE AttachmentDescriptionFlags operator~( AttachmentDescriptionFlagBits bits ) + { + return ~( AttachmentDescriptionFlags( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(AttachmentDescriptionFlagBits::eMayAlias) + }; + }; + + struct AttachmentDescription + { + AttachmentDescription( AttachmentDescriptionFlags flags_ = AttachmentDescriptionFlags(), + Format format_ = Format::eUndefined, + SampleCountFlagBits samples_ = SampleCountFlagBits::e1, + AttachmentLoadOp loadOp_ = AttachmentLoadOp::eLoad, + AttachmentStoreOp storeOp_ = AttachmentStoreOp::eStore, + AttachmentLoadOp stencilLoadOp_ = AttachmentLoadOp::eLoad, + AttachmentStoreOp stencilStoreOp_ = AttachmentStoreOp::eStore, + ImageLayout initialLayout_ = ImageLayout::eUndefined, + ImageLayout finalLayout_ = ImageLayout::eUndefined ) + : flags( flags_ ) + , format( format_ ) + , samples( samples_ ) + , loadOp( loadOp_ ) + , storeOp( storeOp_ ) + , stencilLoadOp( stencilLoadOp_ ) + , stencilStoreOp( stencilStoreOp_ ) + , initialLayout( initialLayout_ ) + , finalLayout( finalLayout_ ) + { + } + + AttachmentDescription( VkAttachmentDescription const & rhs ) + { + memcpy( this, &rhs, sizeof( AttachmentDescription ) ); + } + + AttachmentDescription& operator=( VkAttachmentDescription const & rhs ) + { + memcpy( this, &rhs, sizeof( AttachmentDescription ) ); + return *this; + } + AttachmentDescription& setFlags( AttachmentDescriptionFlags flags_ ) + { + flags = flags_; + return *this; + } + + AttachmentDescription& setFormat( Format format_ ) + { + format = format_; + return *this; + } + + AttachmentDescription& setSamples( SampleCountFlagBits samples_ ) + { + samples = samples_; + return *this; + } + + AttachmentDescription& 
setLoadOp( AttachmentLoadOp loadOp_ ) + { + loadOp = loadOp_; + return *this; + } + + AttachmentDescription& setStoreOp( AttachmentStoreOp storeOp_ ) + { + storeOp = storeOp_; + return *this; + } + + AttachmentDescription& setStencilLoadOp( AttachmentLoadOp stencilLoadOp_ ) + { + stencilLoadOp = stencilLoadOp_; + return *this; + } + + AttachmentDescription& setStencilStoreOp( AttachmentStoreOp stencilStoreOp_ ) + { + stencilStoreOp = stencilStoreOp_; + return *this; + } + + AttachmentDescription& setInitialLayout( ImageLayout initialLayout_ ) + { + initialLayout = initialLayout_; + return *this; + } + + AttachmentDescription& setFinalLayout( ImageLayout finalLayout_ ) + { + finalLayout = finalLayout_; + return *this; + } + + operator VkAttachmentDescription const&() const + { + return *reinterpret_cast(this); + } + + operator VkAttachmentDescription &() + { + return *reinterpret_cast(this); + } + + bool operator==( AttachmentDescription const& rhs ) const + { + return ( flags == rhs.flags ) + && ( format == rhs.format ) + && ( samples == rhs.samples ) + && ( loadOp == rhs.loadOp ) + && ( storeOp == rhs.storeOp ) + && ( stencilLoadOp == rhs.stencilLoadOp ) + && ( stencilStoreOp == rhs.stencilStoreOp ) + && ( initialLayout == rhs.initialLayout ) + && ( finalLayout == rhs.finalLayout ); + } + + bool operator!=( AttachmentDescription const& rhs ) const + { + return !operator==( rhs ); + } + + AttachmentDescriptionFlags flags; + Format format; + SampleCountFlagBits samples; + AttachmentLoadOp loadOp; + AttachmentStoreOp storeOp; + AttachmentLoadOp stencilLoadOp; + AttachmentStoreOp stencilStoreOp; + ImageLayout initialLayout; + ImageLayout finalLayout; + }; + static_assert( sizeof( AttachmentDescription ) == sizeof( VkAttachmentDescription ), "struct and wrapper have different size!" 
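The chained set* members make it convenient to build an AttachmentDescription in place. A sketch of a typical single-sample color attachment that is cleared on load and handed to the presentation engine; the format parameter is an assumption and would normally come from the chosen swapchain surface format:

// Editor's illustrative sketch, not part of the generated header.
#include <vulkan/vulkan.hpp>

vk::AttachmentDescription makeColorAttachment( vk::Format swapchainFormat )
{
  return vk::AttachmentDescription()
    .setFormat( swapchainFormat )
    .setSamples( vk::SampleCountFlagBits::e1 )
    .setLoadOp( vk::AttachmentLoadOp::eClear )
    .setStoreOp( vk::AttachmentStoreOp::eStore )
    .setStencilLoadOp( vk::AttachmentLoadOp::eDontCare )
    .setStencilStoreOp( vk::AttachmentStoreOp::eDontCare )
    .setInitialLayout( vk::ImageLayout::eUndefined )
    .setFinalLayout( vk::ImageLayout::ePresentSrcKHR );
}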
); + + struct AttachmentDescription2KHR + { + AttachmentDescription2KHR( AttachmentDescriptionFlags flags_ = AttachmentDescriptionFlags(), + Format format_ = Format::eUndefined, + SampleCountFlagBits samples_ = SampleCountFlagBits::e1, + AttachmentLoadOp loadOp_ = AttachmentLoadOp::eLoad, + AttachmentStoreOp storeOp_ = AttachmentStoreOp::eStore, + AttachmentLoadOp stencilLoadOp_ = AttachmentLoadOp::eLoad, + AttachmentStoreOp stencilStoreOp_ = AttachmentStoreOp::eStore, + ImageLayout initialLayout_ = ImageLayout::eUndefined, + ImageLayout finalLayout_ = ImageLayout::eUndefined ) + : flags( flags_ ) + , format( format_ ) + , samples( samples_ ) + , loadOp( loadOp_ ) + , storeOp( storeOp_ ) + , stencilLoadOp( stencilLoadOp_ ) + , stencilStoreOp( stencilStoreOp_ ) + , initialLayout( initialLayout_ ) + , finalLayout( finalLayout_ ) + { + } + + AttachmentDescription2KHR( VkAttachmentDescription2KHR const & rhs ) + { + memcpy( this, &rhs, sizeof( AttachmentDescription2KHR ) ); + } + + AttachmentDescription2KHR& operator=( VkAttachmentDescription2KHR const & rhs ) + { + memcpy( this, &rhs, sizeof( AttachmentDescription2KHR ) ); + return *this; + } + AttachmentDescription2KHR& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + AttachmentDescription2KHR& setFlags( AttachmentDescriptionFlags flags_ ) + { + flags = flags_; + return *this; + } + + AttachmentDescription2KHR& setFormat( Format format_ ) + { + format = format_; + return *this; + } + + AttachmentDescription2KHR& setSamples( SampleCountFlagBits samples_ ) + { + samples = samples_; + return *this; + } + + AttachmentDescription2KHR& setLoadOp( AttachmentLoadOp loadOp_ ) + { + loadOp = loadOp_; + return *this; + } + + AttachmentDescription2KHR& setStoreOp( AttachmentStoreOp storeOp_ ) + { + storeOp = storeOp_; + return *this; + } + + AttachmentDescription2KHR& setStencilLoadOp( AttachmentLoadOp stencilLoadOp_ ) + { + stencilLoadOp = stencilLoadOp_; + return *this; + } + + AttachmentDescription2KHR& setStencilStoreOp( AttachmentStoreOp stencilStoreOp_ ) + { + stencilStoreOp = stencilStoreOp_; + return *this; + } + + AttachmentDescription2KHR& setInitialLayout( ImageLayout initialLayout_ ) + { + initialLayout = initialLayout_; + return *this; + } + + AttachmentDescription2KHR& setFinalLayout( ImageLayout finalLayout_ ) + { + finalLayout = finalLayout_; + return *this; + } + + operator VkAttachmentDescription2KHR const&() const + { + return *reinterpret_cast(this); + } + + operator VkAttachmentDescription2KHR &() + { + return *reinterpret_cast(this); + } + + bool operator==( AttachmentDescription2KHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( format == rhs.format ) + && ( samples == rhs.samples ) + && ( loadOp == rhs.loadOp ) + && ( storeOp == rhs.storeOp ) + && ( stencilLoadOp == rhs.stencilLoadOp ) + && ( stencilStoreOp == rhs.stencilStoreOp ) + && ( initialLayout == rhs.initialLayout ) + && ( finalLayout == rhs.finalLayout ); + } + + bool operator!=( AttachmentDescription2KHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eAttachmentDescription2KHR; + + public: + const void* pNext = nullptr; + AttachmentDescriptionFlags flags; + Format format; + SampleCountFlagBits samples; + AttachmentLoadOp loadOp; + AttachmentStoreOp storeOp; + AttachmentLoadOp stencilLoadOp; + AttachmentStoreOp stencilStoreOp; + ImageLayout initialLayout; + ImageLayout finalLayout; + }; + static_assert( sizeof( 
AttachmentDescription2KHR ) == sizeof( VkAttachmentDescription2KHR ), "struct and wrapper have different size!" ); + + enum class StencilFaceFlagBits + { + eFront = VK_STENCIL_FACE_FRONT_BIT, + eBack = VK_STENCIL_FACE_BACK_BIT, + eVkStencilFrontAndBack = VK_STENCIL_FRONT_AND_BACK + }; + + using StencilFaceFlags = Flags; + + VULKAN_HPP_INLINE StencilFaceFlags operator|( StencilFaceFlagBits bit0, StencilFaceFlagBits bit1 ) + { + return StencilFaceFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE StencilFaceFlags operator~( StencilFaceFlagBits bits ) + { + return ~( StencilFaceFlags( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(StencilFaceFlagBits::eFront) | VkFlags(StencilFaceFlagBits::eBack) | VkFlags(StencilFaceFlagBits::eVkStencilFrontAndBack) + }; + }; + + enum class DescriptorPoolCreateFlagBits + { + eFreeDescriptorSet = VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, + eUpdateAfterBindEXT = VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT + }; + + using DescriptorPoolCreateFlags = Flags; + + VULKAN_HPP_INLINE DescriptorPoolCreateFlags operator|( DescriptorPoolCreateFlagBits bit0, DescriptorPoolCreateFlagBits bit1 ) + { + return DescriptorPoolCreateFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE DescriptorPoolCreateFlags operator~( DescriptorPoolCreateFlagBits bits ) + { + return ~( DescriptorPoolCreateFlags( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(DescriptorPoolCreateFlagBits::eFreeDescriptorSet) | VkFlags(DescriptorPoolCreateFlagBits::eUpdateAfterBindEXT) + }; + }; + + struct DescriptorPoolCreateInfo + { + DescriptorPoolCreateInfo( DescriptorPoolCreateFlags flags_ = DescriptorPoolCreateFlags(), + uint32_t maxSets_ = 0, + uint32_t poolSizeCount_ = 0, + const DescriptorPoolSize* pPoolSizes_ = nullptr ) + : flags( flags_ ) + , maxSets( maxSets_ ) + , poolSizeCount( poolSizeCount_ ) + , pPoolSizes( pPoolSizes_ ) + { + } + + DescriptorPoolCreateInfo( VkDescriptorPoolCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( DescriptorPoolCreateInfo ) ); + } + + DescriptorPoolCreateInfo& operator=( VkDescriptorPoolCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( DescriptorPoolCreateInfo ) ); + return *this; + } + DescriptorPoolCreateInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + DescriptorPoolCreateInfo& setFlags( DescriptorPoolCreateFlags flags_ ) + { + flags = flags_; + return *this; + } + + DescriptorPoolCreateInfo& setMaxSets( uint32_t maxSets_ ) + { + maxSets = maxSets_; + return *this; + } + + DescriptorPoolCreateInfo& setPoolSizeCount( uint32_t poolSizeCount_ ) + { + poolSizeCount = poolSizeCount_; + return *this; + } + + DescriptorPoolCreateInfo& setPPoolSizes( const DescriptorPoolSize* pPoolSizes_ ) + { + pPoolSizes = pPoolSizes_; + return *this; + } + + operator VkDescriptorPoolCreateInfo const&() const + { + return *reinterpret_cast(this); + } + + operator VkDescriptorPoolCreateInfo &() + { + return *reinterpret_cast(this); + } + + bool operator==( DescriptorPoolCreateInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( maxSets == rhs.maxSets ) + && ( poolSizeCount == rhs.poolSizeCount ) + && ( pPoolSizes == rhs.pPoolSizes ); + } + + bool operator!=( DescriptorPoolCreateInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eDescriptorPoolCreateInfo; + + public: + const void* pNext = nullptr; + DescriptorPoolCreateFlags 
flags; + uint32_t maxSets; + uint32_t poolSizeCount; + const DescriptorPoolSize* pPoolSizes; + }; + static_assert( sizeof( DescriptorPoolCreateInfo ) == sizeof( VkDescriptorPoolCreateInfo ), "struct and wrapper have different size!" ); + + enum class DependencyFlagBits + { + eByRegion = VK_DEPENDENCY_BY_REGION_BIT, + eDeviceGroup = VK_DEPENDENCY_DEVICE_GROUP_BIT, + eDeviceGroupKHR = VK_DEPENDENCY_DEVICE_GROUP_BIT, + eViewLocal = VK_DEPENDENCY_VIEW_LOCAL_BIT, + eViewLocalKHR = VK_DEPENDENCY_VIEW_LOCAL_BIT + }; + + using DependencyFlags = Flags; + + VULKAN_HPP_INLINE DependencyFlags operator|( DependencyFlagBits bit0, DependencyFlagBits bit1 ) + { + return DependencyFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE DependencyFlags operator~( DependencyFlagBits bits ) + { + return ~( DependencyFlags( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(DependencyFlagBits::eByRegion) | VkFlags(DependencyFlagBits::eDeviceGroup) | VkFlags(DependencyFlagBits::eViewLocal) + }; + }; + + struct SubpassDependency + { + SubpassDependency( uint32_t srcSubpass_ = 0, + uint32_t dstSubpass_ = 0, + PipelineStageFlags srcStageMask_ = PipelineStageFlags(), + PipelineStageFlags dstStageMask_ = PipelineStageFlags(), + AccessFlags srcAccessMask_ = AccessFlags(), + AccessFlags dstAccessMask_ = AccessFlags(), + DependencyFlags dependencyFlags_ = DependencyFlags() ) + : srcSubpass( srcSubpass_ ) + , dstSubpass( dstSubpass_ ) + , srcStageMask( srcStageMask_ ) + , dstStageMask( dstStageMask_ ) + , srcAccessMask( srcAccessMask_ ) + , dstAccessMask( dstAccessMask_ ) + , dependencyFlags( dependencyFlags_ ) + { + } + + SubpassDependency( VkSubpassDependency const & rhs ) + { + memcpy( this, &rhs, sizeof( SubpassDependency ) ); + } + + SubpassDependency& operator=( VkSubpassDependency const & rhs ) + { + memcpy( this, &rhs, sizeof( SubpassDependency ) ); + return *this; + } + SubpassDependency& setSrcSubpass( uint32_t srcSubpass_ ) + { + srcSubpass = srcSubpass_; + return *this; + } + + SubpassDependency& setDstSubpass( uint32_t dstSubpass_ ) + { + dstSubpass = dstSubpass_; + return *this; + } + + SubpassDependency& setSrcStageMask( PipelineStageFlags srcStageMask_ ) + { + srcStageMask = srcStageMask_; + return *this; + } + + SubpassDependency& setDstStageMask( PipelineStageFlags dstStageMask_ ) + { + dstStageMask = dstStageMask_; + return *this; + } + + SubpassDependency& setSrcAccessMask( AccessFlags srcAccessMask_ ) + { + srcAccessMask = srcAccessMask_; + return *this; + } + + SubpassDependency& setDstAccessMask( AccessFlags dstAccessMask_ ) + { + dstAccessMask = dstAccessMask_; + return *this; + } + + SubpassDependency& setDependencyFlags( DependencyFlags dependencyFlags_ ) + { + dependencyFlags = dependencyFlags_; + return *this; + } + + operator VkSubpassDependency const&() const + { + return *reinterpret_cast(this); + } + + operator VkSubpassDependency &() + { + return *reinterpret_cast(this); + } + + bool operator==( SubpassDependency const& rhs ) const + { + return ( srcSubpass == rhs.srcSubpass ) + && ( dstSubpass == rhs.dstSubpass ) + && ( srcStageMask == rhs.srcStageMask ) + && ( dstStageMask == rhs.dstStageMask ) + && ( srcAccessMask == rhs.srcAccessMask ) + && ( dstAccessMask == rhs.dstAccessMask ) + && ( dependencyFlags == rhs.dependencyFlags ); + } + + bool operator!=( SubpassDependency const& rhs ) const + { + return !operator==( rhs ); + } + + uint32_t srcSubpass; + uint32_t dstSubpass; + PipelineStageFlags srcStageMask; + PipelineStageFlags dstStageMask; + AccessFlags 
srcAccessMask; + AccessFlags dstAccessMask; + DependencyFlags dependencyFlags; + }; + static_assert( sizeof( SubpassDependency ) == sizeof( VkSubpassDependency ), "struct and wrapper have different size!" ); + + struct SubpassDependency2KHR + { + SubpassDependency2KHR( uint32_t srcSubpass_ = 0, + uint32_t dstSubpass_ = 0, + PipelineStageFlags srcStageMask_ = PipelineStageFlags(), + PipelineStageFlags dstStageMask_ = PipelineStageFlags(), + AccessFlags srcAccessMask_ = AccessFlags(), + AccessFlags dstAccessMask_ = AccessFlags(), + DependencyFlags dependencyFlags_ = DependencyFlags(), + int32_t viewOffset_ = 0 ) + : srcSubpass( srcSubpass_ ) + , dstSubpass( dstSubpass_ ) + , srcStageMask( srcStageMask_ ) + , dstStageMask( dstStageMask_ ) + , srcAccessMask( srcAccessMask_ ) + , dstAccessMask( dstAccessMask_ ) + , dependencyFlags( dependencyFlags_ ) + , viewOffset( viewOffset_ ) + { + } + + SubpassDependency2KHR( VkSubpassDependency2KHR const & rhs ) + { + memcpy( this, &rhs, sizeof( SubpassDependency2KHR ) ); + } + + SubpassDependency2KHR& operator=( VkSubpassDependency2KHR const & rhs ) + { + memcpy( this, &rhs, sizeof( SubpassDependency2KHR ) ); + return *this; + } + SubpassDependency2KHR& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + SubpassDependency2KHR& setSrcSubpass( uint32_t srcSubpass_ ) + { + srcSubpass = srcSubpass_; + return *this; + } + + SubpassDependency2KHR& setDstSubpass( uint32_t dstSubpass_ ) + { + dstSubpass = dstSubpass_; + return *this; + } + + SubpassDependency2KHR& setSrcStageMask( PipelineStageFlags srcStageMask_ ) + { + srcStageMask = srcStageMask_; + return *this; + } + + SubpassDependency2KHR& setDstStageMask( PipelineStageFlags dstStageMask_ ) + { + dstStageMask = dstStageMask_; + return *this; + } + + SubpassDependency2KHR& setSrcAccessMask( AccessFlags srcAccessMask_ ) + { + srcAccessMask = srcAccessMask_; + return *this; + } + + SubpassDependency2KHR& setDstAccessMask( AccessFlags dstAccessMask_ ) + { + dstAccessMask = dstAccessMask_; + return *this; + } + + SubpassDependency2KHR& setDependencyFlags( DependencyFlags dependencyFlags_ ) + { + dependencyFlags = dependencyFlags_; + return *this; + } + + SubpassDependency2KHR& setViewOffset( int32_t viewOffset_ ) + { + viewOffset = viewOffset_; + return *this; + } + + operator VkSubpassDependency2KHR const&() const + { + return *reinterpret_cast(this); + } + + operator VkSubpassDependency2KHR &() + { + return *reinterpret_cast(this); + } + + bool operator==( SubpassDependency2KHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( srcSubpass == rhs.srcSubpass ) + && ( dstSubpass == rhs.dstSubpass ) + && ( srcStageMask == rhs.srcStageMask ) + && ( dstStageMask == rhs.dstStageMask ) + && ( srcAccessMask == rhs.srcAccessMask ) + && ( dstAccessMask == rhs.dstAccessMask ) + && ( dependencyFlags == rhs.dependencyFlags ) + && ( viewOffset == rhs.viewOffset ); + } + + bool operator!=( SubpassDependency2KHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eSubpassDependency2KHR; + + public: + const void* pNext = nullptr; + uint32_t srcSubpass; + uint32_t dstSubpass; + PipelineStageFlags srcStageMask; + PipelineStageFlags dstStageMask; + AccessFlags srcAccessMask; + AccessFlags dstAccessMask; + DependencyFlags dependencyFlags; + int32_t viewOffset; + }; + static_assert( sizeof( SubpassDependency2KHR ) == sizeof( VkSubpassDependency2KHR ), "struct and wrapper have different size!" 
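SubpassDependency (and the 2KHR variant above) is where render-pass synchronization is expressed. A sketch of the common external-to-first-subpass dependency taken before writing a color attachment; the stage and access choices are the textbook ones for that case, not something mandated by this header:

// Editor's illustrative sketch, not part of the generated header.
#include <vulkan/vulkan.hpp>

vk::SubpassDependency makeColorWriteDependency()
{
  return vk::SubpassDependency()
    .setSrcSubpass( VK_SUBPASS_EXTERNAL )
    .setDstSubpass( 0 )
    .setSrcStageMask( vk::PipelineStageFlagBits::eColorAttachmentOutput )
    .setDstStageMask( vk::PipelineStageFlagBits::eColorAttachmentOutput )
    .setSrcAccessMask( vk::AccessFlags() )                        // nothing to flush from before
    .setDstAccessMask( vk::AccessFlagBits::eColorAttachmentWrite );
}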
); + + enum class PresentModeKHR + { + eImmediate = VK_PRESENT_MODE_IMMEDIATE_KHR, + eMailbox = VK_PRESENT_MODE_MAILBOX_KHR, + eFifo = VK_PRESENT_MODE_FIFO_KHR, + eFifoRelaxed = VK_PRESENT_MODE_FIFO_RELAXED_KHR, + eSharedDemandRefresh = VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR, + eSharedContinuousRefresh = VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR + }; + + enum class ColorSpaceKHR + { + eSrgbNonlinear = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR, + eVkColorspaceSrgbNonlinear = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR, + eDisplayP3NonlinearEXT = VK_COLOR_SPACE_DISPLAY_P3_NONLINEAR_EXT, + eExtendedSrgbLinearEXT = VK_COLOR_SPACE_EXTENDED_SRGB_LINEAR_EXT, + eDciP3LinearEXT = VK_COLOR_SPACE_DCI_P3_LINEAR_EXT, + eDciP3NonlinearEXT = VK_COLOR_SPACE_DCI_P3_NONLINEAR_EXT, + eBt709LinearEXT = VK_COLOR_SPACE_BT709_LINEAR_EXT, + eBt709NonlinearEXT = VK_COLOR_SPACE_BT709_NONLINEAR_EXT, + eBt2020LinearEXT = VK_COLOR_SPACE_BT2020_LINEAR_EXT, + eHdr10St2084EXT = VK_COLOR_SPACE_HDR10_ST2084_EXT, + eDolbyvisionEXT = VK_COLOR_SPACE_DOLBYVISION_EXT, + eHdr10HlgEXT = VK_COLOR_SPACE_HDR10_HLG_EXT, + eAdobergbLinearEXT = VK_COLOR_SPACE_ADOBERGB_LINEAR_EXT, + eAdobergbNonlinearEXT = VK_COLOR_SPACE_ADOBERGB_NONLINEAR_EXT, + ePassThroughEXT = VK_COLOR_SPACE_PASS_THROUGH_EXT, + eExtendedSrgbNonlinearEXT = VK_COLOR_SPACE_EXTENDED_SRGB_NONLINEAR_EXT + }; + + struct SurfaceFormatKHR + { + operator VkSurfaceFormatKHR const&() const + { + return *reinterpret_cast(this); + } + + operator VkSurfaceFormatKHR &() + { + return *reinterpret_cast(this); + } + + bool operator==( SurfaceFormatKHR const& rhs ) const + { + return ( format == rhs.format ) + && ( colorSpace == rhs.colorSpace ); + } + + bool operator!=( SurfaceFormatKHR const& rhs ) const + { + return !operator==( rhs ); + } + + Format format; + ColorSpaceKHR colorSpace; + }; + static_assert( sizeof( SurfaceFormatKHR ) == sizeof( VkSurfaceFormatKHR ), "struct and wrapper have different size!" ); + + struct SurfaceFormat2KHR + { + operator VkSurfaceFormat2KHR const&() const + { + return *reinterpret_cast(this); + } + + operator VkSurfaceFormat2KHR &() + { + return *reinterpret_cast(this); + } + + bool operator==( SurfaceFormat2KHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( surfaceFormat == rhs.surfaceFormat ); + } + + bool operator!=( SurfaceFormat2KHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eSurfaceFormat2KHR; + + public: + void* pNext = nullptr; + SurfaceFormatKHR surfaceFormat; + }; + static_assert( sizeof( SurfaceFormat2KHR ) == sizeof( VkSurfaceFormat2KHR ), "struct and wrapper have different size!" 
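PresentModeKHR, ColorSpaceKHR and SurfaceFormatKHR above are what swapchain setup code typically filters. A sketch of the usual selection helpers, operating on vectors obtained from the surface queries elsewhere; the B8G8R8A8 preference and the mailbox-then-FIFO order are only an example (FIFO is the one mode the spec guarantees):

// Editor's illustrative sketch, not part of the generated header.
#include <vector>
#include <vulkan/vulkan.hpp>

vk::SurfaceFormatKHR chooseSurfaceFormat( const std::vector<vk::SurfaceFormatKHR>& available )
{
  for ( const vk::SurfaceFormatKHR& f : available )
  {
    if ( f.format == vk::Format::eB8G8R8A8Unorm && f.colorSpace == vk::ColorSpaceKHR::eSrgbNonlinear )
      return f;
  }
  return available[0];  // assumes the query returned at least one entry
}

vk::PresentModeKHR choosePresentMode( const std::vector<vk::PresentModeKHR>& available )
{
  for ( vk::PresentModeKHR m : available )
  {
    if ( m == vk::PresentModeKHR::eMailbox )
      return m;
  }
  return vk::PresentModeKHR::eFifo;  // always supported
}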
); + + enum class DisplayPlaneAlphaFlagBitsKHR + { + eOpaque = VK_DISPLAY_PLANE_ALPHA_OPAQUE_BIT_KHR, + eGlobal = VK_DISPLAY_PLANE_ALPHA_GLOBAL_BIT_KHR, + ePerPixel = VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_BIT_KHR, + ePerPixelPremultiplied = VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_PREMULTIPLIED_BIT_KHR + }; + + using DisplayPlaneAlphaFlagsKHR = Flags; + + VULKAN_HPP_INLINE DisplayPlaneAlphaFlagsKHR operator|( DisplayPlaneAlphaFlagBitsKHR bit0, DisplayPlaneAlphaFlagBitsKHR bit1 ) + { + return DisplayPlaneAlphaFlagsKHR( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE DisplayPlaneAlphaFlagsKHR operator~( DisplayPlaneAlphaFlagBitsKHR bits ) + { + return ~( DisplayPlaneAlphaFlagsKHR( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(DisplayPlaneAlphaFlagBitsKHR::eOpaque) | VkFlags(DisplayPlaneAlphaFlagBitsKHR::eGlobal) | VkFlags(DisplayPlaneAlphaFlagBitsKHR::ePerPixel) | VkFlags(DisplayPlaneAlphaFlagBitsKHR::ePerPixelPremultiplied) + }; + }; + + struct DisplayPlaneCapabilitiesKHR + { + operator VkDisplayPlaneCapabilitiesKHR const&() const + { + return *reinterpret_cast(this); + } + + operator VkDisplayPlaneCapabilitiesKHR &() + { + return *reinterpret_cast(this); + } + + bool operator==( DisplayPlaneCapabilitiesKHR const& rhs ) const + { + return ( supportedAlpha == rhs.supportedAlpha ) + && ( minSrcPosition == rhs.minSrcPosition ) + && ( maxSrcPosition == rhs.maxSrcPosition ) + && ( minSrcExtent == rhs.minSrcExtent ) + && ( maxSrcExtent == rhs.maxSrcExtent ) + && ( minDstPosition == rhs.minDstPosition ) + && ( maxDstPosition == rhs.maxDstPosition ) + && ( minDstExtent == rhs.minDstExtent ) + && ( maxDstExtent == rhs.maxDstExtent ); + } + + bool operator!=( DisplayPlaneCapabilitiesKHR const& rhs ) const + { + return !operator==( rhs ); + } + + DisplayPlaneAlphaFlagsKHR supportedAlpha; + Offset2D minSrcPosition; + Offset2D maxSrcPosition; + Extent2D minSrcExtent; + Extent2D maxSrcExtent; + Offset2D minDstPosition; + Offset2D maxDstPosition; + Extent2D minDstExtent; + Extent2D maxDstExtent; + }; + static_assert( sizeof( DisplayPlaneCapabilitiesKHR ) == sizeof( VkDisplayPlaneCapabilitiesKHR ), "struct and wrapper have different size!" ); + + struct DisplayPlaneCapabilities2KHR + { + operator VkDisplayPlaneCapabilities2KHR const&() const + { + return *reinterpret_cast(this); + } + + operator VkDisplayPlaneCapabilities2KHR &() + { + return *reinterpret_cast(this); + } + + bool operator==( DisplayPlaneCapabilities2KHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( capabilities == rhs.capabilities ); + } + + bool operator!=( DisplayPlaneCapabilities2KHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eDisplayPlaneCapabilities2KHR; + + public: + void* pNext = nullptr; + DisplayPlaneCapabilitiesKHR capabilities; + }; + static_assert( sizeof( DisplayPlaneCapabilities2KHR ) == sizeof( VkDisplayPlaneCapabilities2KHR ), "struct and wrapper have different size!" 
); + + enum class CompositeAlphaFlagBitsKHR + { + eOpaque = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR, + ePreMultiplied = VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR, + ePostMultiplied = VK_COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR, + eInherit = VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR + }; + + using CompositeAlphaFlagsKHR = Flags; + + VULKAN_HPP_INLINE CompositeAlphaFlagsKHR operator|( CompositeAlphaFlagBitsKHR bit0, CompositeAlphaFlagBitsKHR bit1 ) + { + return CompositeAlphaFlagsKHR( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE CompositeAlphaFlagsKHR operator~( CompositeAlphaFlagBitsKHR bits ) + { + return ~( CompositeAlphaFlagsKHR( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(CompositeAlphaFlagBitsKHR::eOpaque) | VkFlags(CompositeAlphaFlagBitsKHR::ePreMultiplied) | VkFlags(CompositeAlphaFlagBitsKHR::ePostMultiplied) | VkFlags(CompositeAlphaFlagBitsKHR::eInherit) + }; + }; + + enum class SurfaceTransformFlagBitsKHR + { + eIdentity = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR, + eRotate90 = VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR, + eRotate180 = VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR, + eRotate270 = VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR, + eHorizontalMirror = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_BIT_KHR, + eHorizontalMirrorRotate90 = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR, + eHorizontalMirrorRotate180 = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR, + eHorizontalMirrorRotate270 = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR, + eInherit = VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR + }; + + using SurfaceTransformFlagsKHR = Flags; + + VULKAN_HPP_INLINE SurfaceTransformFlagsKHR operator|( SurfaceTransformFlagBitsKHR bit0, SurfaceTransformFlagBitsKHR bit1 ) + { + return SurfaceTransformFlagsKHR( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE SurfaceTransformFlagsKHR operator~( SurfaceTransformFlagBitsKHR bits ) + { + return ~( SurfaceTransformFlagsKHR( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(SurfaceTransformFlagBitsKHR::eIdentity) | VkFlags(SurfaceTransformFlagBitsKHR::eRotate90) | VkFlags(SurfaceTransformFlagBitsKHR::eRotate180) | VkFlags(SurfaceTransformFlagBitsKHR::eRotate270) | VkFlags(SurfaceTransformFlagBitsKHR::eHorizontalMirror) | VkFlags(SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate90) | VkFlags(SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate180) | VkFlags(SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate270) | VkFlags(SurfaceTransformFlagBitsKHR::eInherit) + }; + }; + + struct DisplayPropertiesKHR + { + operator VkDisplayPropertiesKHR const&() const + { + return *reinterpret_cast(this); + } + + operator VkDisplayPropertiesKHR &() + { + return *reinterpret_cast(this); + } + + bool operator==( DisplayPropertiesKHR const& rhs ) const + { + return ( display == rhs.display ) + && ( displayName == rhs.displayName ) + && ( physicalDimensions == rhs.physicalDimensions ) + && ( physicalResolution == rhs.physicalResolution ) + && ( supportedTransforms == rhs.supportedTransforms ) + && ( planeReorderPossible == rhs.planeReorderPossible ) + && ( persistentContent == rhs.persistentContent ); + } + + bool operator!=( DisplayPropertiesKHR const& rhs ) const + { + return !operator==( rhs ); + } + + DisplayKHR display; + const char* displayName; + Extent2D physicalDimensions; + Extent2D physicalResolution; + SurfaceTransformFlagsKHR supportedTransforms; + Bool32 planeReorderPossible; + Bool32 persistentContent; + }; + static_assert( sizeof( DisplayPropertiesKHR ) == sizeof( 
VkDisplayPropertiesKHR ), "struct and wrapper have different size!" ); + + struct DisplaySurfaceCreateInfoKHR + { + DisplaySurfaceCreateInfoKHR( DisplaySurfaceCreateFlagsKHR flags_ = DisplaySurfaceCreateFlagsKHR(), + DisplayModeKHR displayMode_ = DisplayModeKHR(), + uint32_t planeIndex_ = 0, + uint32_t planeStackIndex_ = 0, + SurfaceTransformFlagBitsKHR transform_ = SurfaceTransformFlagBitsKHR::eIdentity, + float globalAlpha_ = 0, + DisplayPlaneAlphaFlagBitsKHR alphaMode_ = DisplayPlaneAlphaFlagBitsKHR::eOpaque, + Extent2D imageExtent_ = Extent2D() ) + : flags( flags_ ) + , displayMode( displayMode_ ) + , planeIndex( planeIndex_ ) + , planeStackIndex( planeStackIndex_ ) + , transform( transform_ ) + , globalAlpha( globalAlpha_ ) + , alphaMode( alphaMode_ ) + , imageExtent( imageExtent_ ) + { + } + + DisplaySurfaceCreateInfoKHR( VkDisplaySurfaceCreateInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( DisplaySurfaceCreateInfoKHR ) ); + } + + DisplaySurfaceCreateInfoKHR& operator=( VkDisplaySurfaceCreateInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( DisplaySurfaceCreateInfoKHR ) ); + return *this; + } + DisplaySurfaceCreateInfoKHR& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + DisplaySurfaceCreateInfoKHR& setFlags( DisplaySurfaceCreateFlagsKHR flags_ ) + { + flags = flags_; + return *this; + } + + DisplaySurfaceCreateInfoKHR& setDisplayMode( DisplayModeKHR displayMode_ ) + { + displayMode = displayMode_; + return *this; + } + + DisplaySurfaceCreateInfoKHR& setPlaneIndex( uint32_t planeIndex_ ) + { + planeIndex = planeIndex_; + return *this; + } + + DisplaySurfaceCreateInfoKHR& setPlaneStackIndex( uint32_t planeStackIndex_ ) + { + planeStackIndex = planeStackIndex_; + return *this; + } + + DisplaySurfaceCreateInfoKHR& setTransform( SurfaceTransformFlagBitsKHR transform_ ) + { + transform = transform_; + return *this; + } + + DisplaySurfaceCreateInfoKHR& setGlobalAlpha( float globalAlpha_ ) + { + globalAlpha = globalAlpha_; + return *this; + } + + DisplaySurfaceCreateInfoKHR& setAlphaMode( DisplayPlaneAlphaFlagBitsKHR alphaMode_ ) + { + alphaMode = alphaMode_; + return *this; + } + + DisplaySurfaceCreateInfoKHR& setImageExtent( Extent2D imageExtent_ ) + { + imageExtent = imageExtent_; + return *this; + } + + operator VkDisplaySurfaceCreateInfoKHR const&() const + { + return *reinterpret_cast(this); + } + + operator VkDisplaySurfaceCreateInfoKHR &() + { + return *reinterpret_cast(this); + } + + bool operator==( DisplaySurfaceCreateInfoKHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( displayMode == rhs.displayMode ) + && ( planeIndex == rhs.planeIndex ) + && ( planeStackIndex == rhs.planeStackIndex ) + && ( transform == rhs.transform ) + && ( globalAlpha == rhs.globalAlpha ) + && ( alphaMode == rhs.alphaMode ) + && ( imageExtent == rhs.imageExtent ); + } + + bool operator!=( DisplaySurfaceCreateInfoKHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eDisplaySurfaceCreateInfoKHR; + + public: + const void* pNext = nullptr; + DisplaySurfaceCreateFlagsKHR flags; + DisplayModeKHR displayMode; + uint32_t planeIndex; + uint32_t planeStackIndex; + SurfaceTransformFlagBitsKHR transform; + float globalAlpha; + DisplayPlaneAlphaFlagBitsKHR alphaMode; + Extent2D imageExtent; + }; + static_assert( sizeof( DisplaySurfaceCreateInfoKHR ) == sizeof( VkDisplaySurfaceCreateInfoKHR ), "struct and wrapper have different size!" 
); + + struct SurfaceCapabilitiesKHR + { + operator VkSurfaceCapabilitiesKHR const&() const + { + return *reinterpret_cast(this); + } + + operator VkSurfaceCapabilitiesKHR &() + { + return *reinterpret_cast(this); + } + + bool operator==( SurfaceCapabilitiesKHR const& rhs ) const + { + return ( minImageCount == rhs.minImageCount ) + && ( maxImageCount == rhs.maxImageCount ) + && ( currentExtent == rhs.currentExtent ) + && ( minImageExtent == rhs.minImageExtent ) + && ( maxImageExtent == rhs.maxImageExtent ) + && ( maxImageArrayLayers == rhs.maxImageArrayLayers ) + && ( supportedTransforms == rhs.supportedTransforms ) + && ( currentTransform == rhs.currentTransform ) + && ( supportedCompositeAlpha == rhs.supportedCompositeAlpha ) + && ( supportedUsageFlags == rhs.supportedUsageFlags ); + } + + bool operator!=( SurfaceCapabilitiesKHR const& rhs ) const + { + return !operator==( rhs ); + } + + uint32_t minImageCount; + uint32_t maxImageCount; + Extent2D currentExtent; + Extent2D minImageExtent; + Extent2D maxImageExtent; + uint32_t maxImageArrayLayers; + SurfaceTransformFlagsKHR supportedTransforms; + SurfaceTransformFlagBitsKHR currentTransform; + CompositeAlphaFlagsKHR supportedCompositeAlpha; + ImageUsageFlags supportedUsageFlags; + }; + static_assert( sizeof( SurfaceCapabilitiesKHR ) == sizeof( VkSurfaceCapabilitiesKHR ), "struct and wrapper have different size!" ); + + struct SurfaceCapabilities2KHR + { + operator VkSurfaceCapabilities2KHR const&() const + { + return *reinterpret_cast(this); + } + + operator VkSurfaceCapabilities2KHR &() + { + return *reinterpret_cast(this); + } + + bool operator==( SurfaceCapabilities2KHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( surfaceCapabilities == rhs.surfaceCapabilities ); + } + + bool operator!=( SurfaceCapabilities2KHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eSurfaceCapabilities2KHR; + + public: + void* pNext = nullptr; + SurfaceCapabilitiesKHR surfaceCapabilities; + }; + static_assert( sizeof( SurfaceCapabilities2KHR ) == sizeof( VkSurfaceCapabilities2KHR ), "struct and wrapper have different size!" ); + + struct DisplayProperties2KHR + { + operator VkDisplayProperties2KHR const&() const + { + return *reinterpret_cast(this); + } + + operator VkDisplayProperties2KHR &() + { + return *reinterpret_cast(this); + } + + bool operator==( DisplayProperties2KHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( displayProperties == rhs.displayProperties ); + } + + bool operator!=( DisplayProperties2KHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eDisplayProperties2KHR; + + public: + void* pNext = nullptr; + DisplayPropertiesKHR displayProperties; + }; + static_assert( sizeof( DisplayProperties2KHR ) == sizeof( VkDisplayProperties2KHR ), "struct and wrapper have different size!" 
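SurfaceCapabilitiesKHR drives the swapchain extent and image count. A sketch of the conventional extent selection: honor currentExtent when the surface reports a fixed size, otherwise clamp the window size into the [minImageExtent, maxImageExtent] range; the window dimensions are assumed to come from the windowing layer:

// Editor's illustrative sketch, not part of the generated header.
#include <algorithm>
#include <cstdint>
#include <vulkan/vulkan.hpp>

vk::Extent2D chooseSwapchainExtent( const vk::SurfaceCapabilitiesKHR& caps,
                                    uint32_t windowWidth, uint32_t windowHeight )
{
  // 0xFFFFFFFF in currentExtent means the swapchain chooses the size.
  if ( caps.currentExtent.width != 0xFFFFFFFFu )
    return caps.currentExtent;

  vk::Extent2D extent( windowWidth, windowHeight );
  extent.width  = std::max( caps.minImageExtent.width,  std::min( caps.maxImageExtent.width,  extent.width ) );
  extent.height = std::max( caps.minImageExtent.height, std::min( caps.maxImageExtent.height, extent.height ) );
  return extent;
}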
); + + enum class TimeDomainEXT + { + eDevice = VK_TIME_DOMAIN_DEVICE_EXT, + eClockMonotonic = VK_TIME_DOMAIN_CLOCK_MONOTONIC_EXT, + eClockMonotonicRaw = VK_TIME_DOMAIN_CLOCK_MONOTONIC_RAW_EXT, + eQueryPerformanceCounter = VK_TIME_DOMAIN_QUERY_PERFORMANCE_COUNTER_EXT + }; + + struct CalibratedTimestampInfoEXT + { + CalibratedTimestampInfoEXT( TimeDomainEXT timeDomain_ = TimeDomainEXT::eDevice ) + : timeDomain( timeDomain_ ) + { + } + + CalibratedTimestampInfoEXT( VkCalibratedTimestampInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( CalibratedTimestampInfoEXT ) ); + } + + CalibratedTimestampInfoEXT& operator=( VkCalibratedTimestampInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( CalibratedTimestampInfoEXT ) ); + return *this; + } + CalibratedTimestampInfoEXT& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + CalibratedTimestampInfoEXT& setTimeDomain( TimeDomainEXT timeDomain_ ) + { + timeDomain = timeDomain_; + return *this; + } + + operator VkCalibratedTimestampInfoEXT const&() const + { + return *reinterpret_cast(this); + } + + operator VkCalibratedTimestampInfoEXT &() + { + return *reinterpret_cast(this); + } + + bool operator==( CalibratedTimestampInfoEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( timeDomain == rhs.timeDomain ); + } + + bool operator!=( CalibratedTimestampInfoEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eCalibratedTimestampInfoEXT; + + public: + const void* pNext = nullptr; + TimeDomainEXT timeDomain; + }; + static_assert( sizeof( CalibratedTimestampInfoEXT ) == sizeof( VkCalibratedTimestampInfoEXT ), "struct and wrapper have different size!" ); + + enum class DebugReportFlagBitsEXT + { + eInformation = VK_DEBUG_REPORT_INFORMATION_BIT_EXT, + eWarning = VK_DEBUG_REPORT_WARNING_BIT_EXT, + ePerformanceWarning = VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT, + eError = VK_DEBUG_REPORT_ERROR_BIT_EXT, + eDebug = VK_DEBUG_REPORT_DEBUG_BIT_EXT + }; + + using DebugReportFlagsEXT = Flags; + + VULKAN_HPP_INLINE DebugReportFlagsEXT operator|( DebugReportFlagBitsEXT bit0, DebugReportFlagBitsEXT bit1 ) + { + return DebugReportFlagsEXT( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE DebugReportFlagsEXT operator~( DebugReportFlagBitsEXT bits ) + { + return ~( DebugReportFlagsEXT( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(DebugReportFlagBitsEXT::eInformation) | VkFlags(DebugReportFlagBitsEXT::eWarning) | VkFlags(DebugReportFlagBitsEXT::ePerformanceWarning) | VkFlags(DebugReportFlagBitsEXT::eError) | VkFlags(DebugReportFlagBitsEXT::eDebug) + }; + }; + + struct DebugReportCallbackCreateInfoEXT + { + DebugReportCallbackCreateInfoEXT( DebugReportFlagsEXT flags_ = DebugReportFlagsEXT(), + PFN_vkDebugReportCallbackEXT pfnCallback_ = nullptr, + void* pUserData_ = nullptr ) + : flags( flags_ ) + , pfnCallback( pfnCallback_ ) + , pUserData( pUserData_ ) + { + } + + DebugReportCallbackCreateInfoEXT( VkDebugReportCallbackCreateInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( DebugReportCallbackCreateInfoEXT ) ); + } + + DebugReportCallbackCreateInfoEXT& operator=( VkDebugReportCallbackCreateInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( DebugReportCallbackCreateInfoEXT ) ); + return *this; + } + DebugReportCallbackCreateInfoEXT& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + DebugReportCallbackCreateInfoEXT& setFlags( DebugReportFlagsEXT flags_ ) + { + flags = 
flags_; + return *this; + } + + DebugReportCallbackCreateInfoEXT& setPfnCallback( PFN_vkDebugReportCallbackEXT pfnCallback_ ) + { + pfnCallback = pfnCallback_; + return *this; + } + + DebugReportCallbackCreateInfoEXT& setPUserData( void* pUserData_ ) + { + pUserData = pUserData_; + return *this; + } + + operator VkDebugReportCallbackCreateInfoEXT const&() const + { + return *reinterpret_cast(this); + } + + operator VkDebugReportCallbackCreateInfoEXT &() + { + return *reinterpret_cast(this); + } + + bool operator==( DebugReportCallbackCreateInfoEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( pfnCallback == rhs.pfnCallback ) + && ( pUserData == rhs.pUserData ); + } + + bool operator!=( DebugReportCallbackCreateInfoEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eDebugReportCallbackCreateInfoEXT; + + public: + const void* pNext = nullptr; + DebugReportFlagsEXT flags; + PFN_vkDebugReportCallbackEXT pfnCallback; + void* pUserData; + }; + static_assert( sizeof( DebugReportCallbackCreateInfoEXT ) == sizeof( VkDebugReportCallbackCreateInfoEXT ), "struct and wrapper have different size!" ); + + enum class DebugReportObjectTypeEXT + { + eUnknown = VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, + eInstance = VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, + ePhysicalDevice = VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, + eDevice = VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, + eQueue = VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT, + eSemaphore = VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT, + eCommandBuffer = VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, + eFence = VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT, + eDeviceMemory = VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, + eBuffer = VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, + eImage = VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, + eEvent = VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT, + eQueryPool = VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, + eBufferView = VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT, + eImageView = VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT, + eShaderModule = VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT, + ePipelineCache = VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT, + ePipelineLayout = VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT, + eRenderPass = VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, + ePipeline = VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT, + eDescriptorSetLayout = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT, + eSampler = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT, + eDescriptorPool = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT, + eDescriptorSet = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT, + eFramebuffer = VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT, + eCommandPool = VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT, + eSurfaceKhr = VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, + eSwapchainKhr = VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT, + eDebugReportCallbackExt = VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT, + eDebugReport = VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT, + eDisplayKhr = VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_KHR_EXT, + eDisplayModeKhr = VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_MODE_KHR_EXT, + eObjectTableNvx = VK_DEBUG_REPORT_OBJECT_TYPE_OBJECT_TABLE_NVX_EXT, + eIndirectCommandsLayoutNvx = VK_DEBUG_REPORT_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX_EXT, + eValidationCacheExt = VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT, + eValidationCache = 
VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT, + eSamplerYcbcrConversion = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT, + eSamplerYcbcrConversionKHR = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT, + eDescriptorUpdateTemplate = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT, + eDescriptorUpdateTemplateKHR = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT, + eAccelerationStructureNV = VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT + }; + + struct DebugMarkerObjectNameInfoEXT + { + DebugMarkerObjectNameInfoEXT( DebugReportObjectTypeEXT objectType_ = DebugReportObjectTypeEXT::eUnknown, + uint64_t object_ = 0, + const char* pObjectName_ = nullptr ) + : objectType( objectType_ ) + , object( object_ ) + , pObjectName( pObjectName_ ) + { + } + + DebugMarkerObjectNameInfoEXT( VkDebugMarkerObjectNameInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( DebugMarkerObjectNameInfoEXT ) ); + } + + DebugMarkerObjectNameInfoEXT& operator=( VkDebugMarkerObjectNameInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( DebugMarkerObjectNameInfoEXT ) ); + return *this; + } + DebugMarkerObjectNameInfoEXT& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + DebugMarkerObjectNameInfoEXT& setObjectType( DebugReportObjectTypeEXT objectType_ ) + { + objectType = objectType_; + return *this; + } + + DebugMarkerObjectNameInfoEXT& setObject( uint64_t object_ ) + { + object = object_; + return *this; + } + + DebugMarkerObjectNameInfoEXT& setPObjectName( const char* pObjectName_ ) + { + pObjectName = pObjectName_; + return *this; + } + + operator VkDebugMarkerObjectNameInfoEXT const&() const + { + return *reinterpret_cast(this); + } + + operator VkDebugMarkerObjectNameInfoEXT &() + { + return *reinterpret_cast(this); + } + + bool operator==( DebugMarkerObjectNameInfoEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( objectType == rhs.objectType ) + && ( object == rhs.object ) + && ( pObjectName == rhs.pObjectName ); + } + + bool operator!=( DebugMarkerObjectNameInfoEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eDebugMarkerObjectNameInfoEXT; + + public: + const void* pNext = nullptr; + DebugReportObjectTypeEXT objectType; + uint64_t object; + const char* pObjectName; + }; + static_assert( sizeof( DebugMarkerObjectNameInfoEXT ) == sizeof( VkDebugMarkerObjectNameInfoEXT ), "struct and wrapper have different size!" 
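DebugReportCallbackCreateInfoEXT pairs the report flags above with a VK_EXT_debug_report callback. A sketch of a logging callback and the matching create info; registering it still requires the extension to be enabled on the instance and vkCreateDebugReportCallbackEXT to be loaded, which is outside this snippet:

// Editor's illustrative sketch, not part of the generated header.
#include <cstdint>
#include <cstdio>
#include <vulkan/vulkan.hpp>

VKAPI_ATTR VkBool32 VKAPI_CALL debugReportCallback(
    VkDebugReportFlagsEXT /*flags*/, VkDebugReportObjectTypeEXT /*objectType*/,
    uint64_t /*object*/, size_t /*location*/, int32_t /*messageCode*/,
    const char* pLayerPrefix, const char* pMessage, void* /*pUserData*/ )
{
  std::fprintf( stderr, "[%s] %s\n", pLayerPrefix, pMessage );
  return VK_FALSE;  // do not abort the call that triggered the report
}

vk::DebugReportCallbackCreateInfoEXT makeDebugReportInfo()
{
  return vk::DebugReportCallbackCreateInfoEXT()
    .setFlags( vk::DebugReportFlagBitsEXT::eWarning
             | vk::DebugReportFlagBitsEXT::ePerformanceWarning
             | vk::DebugReportFlagBitsEXT::eError )
    .setPfnCallback( debugReportCallback );
}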
); + + struct DebugMarkerObjectTagInfoEXT + { + DebugMarkerObjectTagInfoEXT( DebugReportObjectTypeEXT objectType_ = DebugReportObjectTypeEXT::eUnknown, + uint64_t object_ = 0, + uint64_t tagName_ = 0, + size_t tagSize_ = 0, + const void* pTag_ = nullptr ) + : objectType( objectType_ ) + , object( object_ ) + , tagName( tagName_ ) + , tagSize( tagSize_ ) + , pTag( pTag_ ) + { + } + + DebugMarkerObjectTagInfoEXT( VkDebugMarkerObjectTagInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( DebugMarkerObjectTagInfoEXT ) ); + } + + DebugMarkerObjectTagInfoEXT& operator=( VkDebugMarkerObjectTagInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( DebugMarkerObjectTagInfoEXT ) ); + return *this; + } + DebugMarkerObjectTagInfoEXT& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + DebugMarkerObjectTagInfoEXT& setObjectType( DebugReportObjectTypeEXT objectType_ ) + { + objectType = objectType_; + return *this; + } + + DebugMarkerObjectTagInfoEXT& setObject( uint64_t object_ ) + { + object = object_; + return *this; + } + + DebugMarkerObjectTagInfoEXT& setTagName( uint64_t tagName_ ) + { + tagName = tagName_; + return *this; + } + + DebugMarkerObjectTagInfoEXT& setTagSize( size_t tagSize_ ) + { + tagSize = tagSize_; + return *this; + } + + DebugMarkerObjectTagInfoEXT& setPTag( const void* pTag_ ) + { + pTag = pTag_; + return *this; + } + + operator VkDebugMarkerObjectTagInfoEXT const&() const + { + return *reinterpret_cast(this); + } + + operator VkDebugMarkerObjectTagInfoEXT &() + { + return *reinterpret_cast(this); + } + + bool operator==( DebugMarkerObjectTagInfoEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( objectType == rhs.objectType ) + && ( object == rhs.object ) + && ( tagName == rhs.tagName ) + && ( tagSize == rhs.tagSize ) + && ( pTag == rhs.pTag ); + } + + bool operator!=( DebugMarkerObjectTagInfoEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eDebugMarkerObjectTagInfoEXT; + + public: + const void* pNext = nullptr; + DebugReportObjectTypeEXT objectType; + uint64_t object; + uint64_t tagName; + size_t tagSize; + const void* pTag; + }; + static_assert( sizeof( DebugMarkerObjectTagInfoEXT ) == sizeof( VkDebugMarkerObjectTagInfoEXT ), "struct and wrapper have different size!" 
);
+
+  enum class RasterizationOrderAMD
+  {
+    eStrict = VK_RASTERIZATION_ORDER_STRICT_AMD,
+    eRelaxed = VK_RASTERIZATION_ORDER_RELAXED_AMD
+  };
+
+  struct PipelineRasterizationStateRasterizationOrderAMD
+  {
+    PipelineRasterizationStateRasterizationOrderAMD( RasterizationOrderAMD rasterizationOrder_ = RasterizationOrderAMD::eStrict )
+      : rasterizationOrder( rasterizationOrder_ )
+    {
+    }
+
+    PipelineRasterizationStateRasterizationOrderAMD( VkPipelineRasterizationStateRasterizationOrderAMD const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( PipelineRasterizationStateRasterizationOrderAMD ) );
+    }
+
+    PipelineRasterizationStateRasterizationOrderAMD& operator=( VkPipelineRasterizationStateRasterizationOrderAMD const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( PipelineRasterizationStateRasterizationOrderAMD ) );
+      return *this;
+    }
+    PipelineRasterizationStateRasterizationOrderAMD& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineRasterizationStateRasterizationOrderAMD& setRasterizationOrder( RasterizationOrderAMD rasterizationOrder_ )
+    {
+      rasterizationOrder = rasterizationOrder_;
+      return *this;
+    }
+
+    operator VkPipelineRasterizationStateRasterizationOrderAMD const&() const
+    {
+      return *reinterpret_cast<const VkPipelineRasterizationStateRasterizationOrderAMD*>(this);
+    }
+
+    operator VkPipelineRasterizationStateRasterizationOrderAMD &()
+    {
+      return *reinterpret_cast<VkPipelineRasterizationStateRasterizationOrderAMD*>(this);
+    }
+
+    bool operator==( PipelineRasterizationStateRasterizationOrderAMD const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( rasterizationOrder == rhs.rasterizationOrder );
+    }
+
+    bool operator!=( PipelineRasterizationStateRasterizationOrderAMD const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType = StructureType::ePipelineRasterizationStateRasterizationOrderAMD;
+
+  public:
+    const void* pNext = nullptr;
+    RasterizationOrderAMD rasterizationOrder;
+  };
+  static_assert( sizeof( PipelineRasterizationStateRasterizationOrderAMD ) == sizeof( VkPipelineRasterizationStateRasterizationOrderAMD ), "struct and wrapper have different size!"
); + + enum class ExternalMemoryHandleTypeFlagBitsNV + { + eOpaqueWin32 = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_NV, + eOpaqueWin32Kmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_NV, + eD3D11Image = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_BIT_NV, + eD3D11ImageKmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_KMT_BIT_NV + }; + + using ExternalMemoryHandleTypeFlagsNV = Flags; + + VULKAN_HPP_INLINE ExternalMemoryHandleTypeFlagsNV operator|( ExternalMemoryHandleTypeFlagBitsNV bit0, ExternalMemoryHandleTypeFlagBitsNV bit1 ) + { + return ExternalMemoryHandleTypeFlagsNV( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE ExternalMemoryHandleTypeFlagsNV operator~( ExternalMemoryHandleTypeFlagBitsNV bits ) + { + return ~( ExternalMemoryHandleTypeFlagsNV( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32) | VkFlags(ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32Kmt) | VkFlags(ExternalMemoryHandleTypeFlagBitsNV::eD3D11Image) | VkFlags(ExternalMemoryHandleTypeFlagBitsNV::eD3D11ImageKmt) + }; + }; + + struct ExternalMemoryImageCreateInfoNV + { + ExternalMemoryImageCreateInfoNV( ExternalMemoryHandleTypeFlagsNV handleTypes_ = ExternalMemoryHandleTypeFlagsNV() ) + : handleTypes( handleTypes_ ) + { + } + + ExternalMemoryImageCreateInfoNV( VkExternalMemoryImageCreateInfoNV const & rhs ) + { + memcpy( this, &rhs, sizeof( ExternalMemoryImageCreateInfoNV ) ); + } + + ExternalMemoryImageCreateInfoNV& operator=( VkExternalMemoryImageCreateInfoNV const & rhs ) + { + memcpy( this, &rhs, sizeof( ExternalMemoryImageCreateInfoNV ) ); + return *this; + } + ExternalMemoryImageCreateInfoNV& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + ExternalMemoryImageCreateInfoNV& setHandleTypes( ExternalMemoryHandleTypeFlagsNV handleTypes_ ) + { + handleTypes = handleTypes_; + return *this; + } + + operator VkExternalMemoryImageCreateInfoNV const&() const + { + return *reinterpret_cast(this); + } + + operator VkExternalMemoryImageCreateInfoNV &() + { + return *reinterpret_cast(this); + } + + bool operator==( ExternalMemoryImageCreateInfoNV const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( handleTypes == rhs.handleTypes ); + } + + bool operator!=( ExternalMemoryImageCreateInfoNV const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eExternalMemoryImageCreateInfoNV; + + public: + const void* pNext = nullptr; + ExternalMemoryHandleTypeFlagsNV handleTypes; + }; + static_assert( sizeof( ExternalMemoryImageCreateInfoNV ) == sizeof( VkExternalMemoryImageCreateInfoNV ), "struct and wrapper have different size!" 
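// Illustrative sketch (not part of this header): combining two handle-type bits with the
// operator| defined above and storing the result in an ExternalMemoryImageCreateInfoNV,
// which an application would chain into ImageCreateInfo::pNext. The chaining target and the
// surrounding image-creation code are assumptions.
inline vk::ExternalMemoryImageCreateInfoNV makeExternalImageInfoNV()
{
  vk::ExternalMemoryHandleTypeFlagsNV handleTypes =
    vk::ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32 | vk::ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32Kmt;
  return vk::ExternalMemoryImageCreateInfoNV().setHandleTypes( handleTypes );
}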
); + + struct ExportMemoryAllocateInfoNV + { + ExportMemoryAllocateInfoNV( ExternalMemoryHandleTypeFlagsNV handleTypes_ = ExternalMemoryHandleTypeFlagsNV() ) + : handleTypes( handleTypes_ ) + { + } + + ExportMemoryAllocateInfoNV( VkExportMemoryAllocateInfoNV const & rhs ) + { + memcpy( this, &rhs, sizeof( ExportMemoryAllocateInfoNV ) ); + } + + ExportMemoryAllocateInfoNV& operator=( VkExportMemoryAllocateInfoNV const & rhs ) + { + memcpy( this, &rhs, sizeof( ExportMemoryAllocateInfoNV ) ); + return *this; + } + ExportMemoryAllocateInfoNV& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + ExportMemoryAllocateInfoNV& setHandleTypes( ExternalMemoryHandleTypeFlagsNV handleTypes_ ) + { + handleTypes = handleTypes_; + return *this; + } + + operator VkExportMemoryAllocateInfoNV const&() const + { + return *reinterpret_cast(this); + } + + operator VkExportMemoryAllocateInfoNV &() + { + return *reinterpret_cast(this); + } + + bool operator==( ExportMemoryAllocateInfoNV const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( handleTypes == rhs.handleTypes ); + } + + bool operator!=( ExportMemoryAllocateInfoNV const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eExportMemoryAllocateInfoNV; + + public: + const void* pNext = nullptr; + ExternalMemoryHandleTypeFlagsNV handleTypes; + }; + static_assert( sizeof( ExportMemoryAllocateInfoNV ) == sizeof( VkExportMemoryAllocateInfoNV ), "struct and wrapper have different size!" ); + +#ifdef VK_USE_PLATFORM_WIN32_NV + struct ImportMemoryWin32HandleInfoNV + { + ImportMemoryWin32HandleInfoNV( ExternalMemoryHandleTypeFlagsNV handleType_ = ExternalMemoryHandleTypeFlagsNV(), + HANDLE handle_ = 0 ) + : handleType( handleType_ ) + , handle( handle_ ) + { + } + + ImportMemoryWin32HandleInfoNV( VkImportMemoryWin32HandleInfoNV const & rhs ) + { + memcpy( this, &rhs, sizeof( ImportMemoryWin32HandleInfoNV ) ); + } + + ImportMemoryWin32HandleInfoNV& operator=( VkImportMemoryWin32HandleInfoNV const & rhs ) + { + memcpy( this, &rhs, sizeof( ImportMemoryWin32HandleInfoNV ) ); + return *this; + } + ImportMemoryWin32HandleInfoNV& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + ImportMemoryWin32HandleInfoNV& setHandleType( ExternalMemoryHandleTypeFlagsNV handleType_ ) + { + handleType = handleType_; + return *this; + } + + ImportMemoryWin32HandleInfoNV& setHandle( HANDLE handle_ ) + { + handle = handle_; + return *this; + } + + operator VkImportMemoryWin32HandleInfoNV const&() const + { + return *reinterpret_cast(this); + } + + operator VkImportMemoryWin32HandleInfoNV &() + { + return *reinterpret_cast(this); + } + + bool operator==( ImportMemoryWin32HandleInfoNV const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( handleType == rhs.handleType ) + && ( handle == rhs.handle ); + } + + bool operator!=( ImportMemoryWin32HandleInfoNV const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eImportMemoryWin32HandleInfoNV; + + public: + const void* pNext = nullptr; + ExternalMemoryHandleTypeFlagsNV handleType; + HANDLE handle; + }; + static_assert( sizeof( ImportMemoryWin32HandleInfoNV ) == sizeof( VkImportMemoryWin32HandleInfoNV ), "struct and wrapper have different size!" 
); +#endif /*VK_USE_PLATFORM_WIN32_NV*/ + + enum class ExternalMemoryFeatureFlagBitsNV + { + eDedicatedOnly = VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT_NV, + eExportable = VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_NV, + eImportable = VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_NV + }; + + using ExternalMemoryFeatureFlagsNV = Flags; + + VULKAN_HPP_INLINE ExternalMemoryFeatureFlagsNV operator|( ExternalMemoryFeatureFlagBitsNV bit0, ExternalMemoryFeatureFlagBitsNV bit1 ) + { + return ExternalMemoryFeatureFlagsNV( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE ExternalMemoryFeatureFlagsNV operator~( ExternalMemoryFeatureFlagBitsNV bits ) + { + return ~( ExternalMemoryFeatureFlagsNV( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(ExternalMemoryFeatureFlagBitsNV::eDedicatedOnly) | VkFlags(ExternalMemoryFeatureFlagBitsNV::eExportable) | VkFlags(ExternalMemoryFeatureFlagBitsNV::eImportable) + }; + }; + + struct ExternalImageFormatPropertiesNV + { + operator VkExternalImageFormatPropertiesNV const&() const + { + return *reinterpret_cast(this); + } + + operator VkExternalImageFormatPropertiesNV &() + { + return *reinterpret_cast(this); + } + + bool operator==( ExternalImageFormatPropertiesNV const& rhs ) const + { + return ( imageFormatProperties == rhs.imageFormatProperties ) + && ( externalMemoryFeatures == rhs.externalMemoryFeatures ) + && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes ) + && ( compatibleHandleTypes == rhs.compatibleHandleTypes ); + } + + bool operator!=( ExternalImageFormatPropertiesNV const& rhs ) const + { + return !operator==( rhs ); + } + + ImageFormatProperties imageFormatProperties; + ExternalMemoryFeatureFlagsNV externalMemoryFeatures; + ExternalMemoryHandleTypeFlagsNV exportFromImportedHandleTypes; + ExternalMemoryHandleTypeFlagsNV compatibleHandleTypes; + }; + static_assert( sizeof( ExternalImageFormatPropertiesNV ) == sizeof( VkExternalImageFormatPropertiesNV ), "struct and wrapper have different size!" 
); + + enum class ValidationCheckEXT + { + eAll = VK_VALIDATION_CHECK_ALL_EXT, + eShaders = VK_VALIDATION_CHECK_SHADERS_EXT + }; + + struct ValidationFlagsEXT + { + ValidationFlagsEXT( uint32_t disabledValidationCheckCount_ = 0, + const ValidationCheckEXT* pDisabledValidationChecks_ = nullptr ) + : disabledValidationCheckCount( disabledValidationCheckCount_ ) + , pDisabledValidationChecks( pDisabledValidationChecks_ ) + { + } + + ValidationFlagsEXT( VkValidationFlagsEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( ValidationFlagsEXT ) ); + } + + ValidationFlagsEXT& operator=( VkValidationFlagsEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( ValidationFlagsEXT ) ); + return *this; + } + ValidationFlagsEXT& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + ValidationFlagsEXT& setDisabledValidationCheckCount( uint32_t disabledValidationCheckCount_ ) + { + disabledValidationCheckCount = disabledValidationCheckCount_; + return *this; + } + + ValidationFlagsEXT& setPDisabledValidationChecks( const ValidationCheckEXT* pDisabledValidationChecks_ ) + { + pDisabledValidationChecks = pDisabledValidationChecks_; + return *this; + } + + operator VkValidationFlagsEXT const&() const + { + return *reinterpret_cast(this); + } + + operator VkValidationFlagsEXT &() + { + return *reinterpret_cast(this); + } + + bool operator==( ValidationFlagsEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( disabledValidationCheckCount == rhs.disabledValidationCheckCount ) + && ( pDisabledValidationChecks == rhs.pDisabledValidationChecks ); + } + + bool operator!=( ValidationFlagsEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eValidationFlagsEXT; + + public: + const void* pNext = nullptr; + uint32_t disabledValidationCheckCount; + const ValidationCheckEXT* pDisabledValidationChecks; + }; + static_assert( sizeof( ValidationFlagsEXT ) == sizeof( VkValidationFlagsEXT ), "struct and wrapper have different size!" 
);
+
+  enum class SubgroupFeatureFlagBits
+  {
+    eBasic = VK_SUBGROUP_FEATURE_BASIC_BIT,
+    eVote = VK_SUBGROUP_FEATURE_VOTE_BIT,
+    eArithmetic = VK_SUBGROUP_FEATURE_ARITHMETIC_BIT,
+    eBallot = VK_SUBGROUP_FEATURE_BALLOT_BIT,
+    eShuffle = VK_SUBGROUP_FEATURE_SHUFFLE_BIT,
+    eShuffleRelative = VK_SUBGROUP_FEATURE_SHUFFLE_RELATIVE_BIT,
+    eClustered = VK_SUBGROUP_FEATURE_CLUSTERED_BIT,
+    eQuad = VK_SUBGROUP_FEATURE_QUAD_BIT,
+    ePartitionedNV = VK_SUBGROUP_FEATURE_PARTITIONED_BIT_NV
+  };
+
+  using SubgroupFeatureFlags = Flags<SubgroupFeatureFlagBits, VkSubgroupFeatureFlags>;
+
+  VULKAN_HPP_INLINE SubgroupFeatureFlags operator|( SubgroupFeatureFlagBits bit0, SubgroupFeatureFlagBits bit1 )
+  {
+    return SubgroupFeatureFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE SubgroupFeatureFlags operator~( SubgroupFeatureFlagBits bits )
+  {
+    return ~( SubgroupFeatureFlags( bits ) );
+  }
+
+  template <> struct FlagTraits<SubgroupFeatureFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(SubgroupFeatureFlagBits::eBasic) | VkFlags(SubgroupFeatureFlagBits::eVote) | VkFlags(SubgroupFeatureFlagBits::eArithmetic) | VkFlags(SubgroupFeatureFlagBits::eBallot) | VkFlags(SubgroupFeatureFlagBits::eShuffle) | VkFlags(SubgroupFeatureFlagBits::eShuffleRelative) | VkFlags(SubgroupFeatureFlagBits::eClustered) | VkFlags(SubgroupFeatureFlagBits::eQuad) | VkFlags(SubgroupFeatureFlagBits::ePartitionedNV)
+    };
+  };
+
+  struct PhysicalDeviceSubgroupProperties
+  {
+    operator VkPhysicalDeviceSubgroupProperties const&() const
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceSubgroupProperties*>(this);
+    }
+
+    operator VkPhysicalDeviceSubgroupProperties &()
+    {
+      return *reinterpret_cast<VkPhysicalDeviceSubgroupProperties*>(this);
+    }
+
+    bool operator==( PhysicalDeviceSubgroupProperties const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( subgroupSize == rhs.subgroupSize )
+          && ( supportedStages == rhs.supportedStages )
+          && ( supportedOperations == rhs.supportedOperations )
+          && ( quadOperationsInAllStages == rhs.quadOperationsInAllStages );
+    }
+
+    bool operator!=( PhysicalDeviceSubgroupProperties const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType = StructureType::ePhysicalDeviceSubgroupProperties;
+
+  public:
+    void* pNext = nullptr;
+    uint32_t subgroupSize;
+    ShaderStageFlags supportedStages;
+    SubgroupFeatureFlags supportedOperations;
+    Bool32 quadOperationsInAllStages;
+  };
+  static_assert( sizeof( PhysicalDeviceSubgroupProperties ) == sizeof( VkPhysicalDeviceSubgroupProperties ), "struct and wrapper have different size!"
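// Illustrative sketch (not part of this header): reading PhysicalDeviceSubgroupProperties by
// chaining it behind PhysicalDeviceProperties2, which is declared elsewhere in this header.
// The getProperties2 wrapper call is an assumption about the rest of the API; the underlying
// entry point is vkGetPhysicalDeviceProperties2.
inline uint32_t querySubgroupSize( vk::PhysicalDevice physicalDevice )
{
  vk::PhysicalDeviceSubgroupProperties subgroupProperties;
  vk::PhysicalDeviceProperties2 properties2;
  properties2.pNext = &subgroupProperties;        // hook the extension struct into the query chain
  physicalDevice.getProperties2( &properties2 );  // fills both structures
  return subgroupProperties.subgroupSize;
}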
); + + enum class IndirectCommandsLayoutUsageFlagBitsNVX + { + eUnorderedSequences = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_UNORDERED_SEQUENCES_BIT_NVX, + eSparseSequences = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_SPARSE_SEQUENCES_BIT_NVX, + eEmptyExecutions = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_EMPTY_EXECUTIONS_BIT_NVX, + eIndexedSequences = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_INDEXED_SEQUENCES_BIT_NVX + }; + + using IndirectCommandsLayoutUsageFlagsNVX = Flags; + + VULKAN_HPP_INLINE IndirectCommandsLayoutUsageFlagsNVX operator|( IndirectCommandsLayoutUsageFlagBitsNVX bit0, IndirectCommandsLayoutUsageFlagBitsNVX bit1 ) + { + return IndirectCommandsLayoutUsageFlagsNVX( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE IndirectCommandsLayoutUsageFlagsNVX operator~( IndirectCommandsLayoutUsageFlagBitsNVX bits ) + { + return ~( IndirectCommandsLayoutUsageFlagsNVX( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(IndirectCommandsLayoutUsageFlagBitsNVX::eUnorderedSequences) | VkFlags(IndirectCommandsLayoutUsageFlagBitsNVX::eSparseSequences) | VkFlags(IndirectCommandsLayoutUsageFlagBitsNVX::eEmptyExecutions) | VkFlags(IndirectCommandsLayoutUsageFlagBitsNVX::eIndexedSequences) + }; + }; + + enum class ObjectEntryUsageFlagBitsNVX + { + eGraphics = VK_OBJECT_ENTRY_USAGE_GRAPHICS_BIT_NVX, + eCompute = VK_OBJECT_ENTRY_USAGE_COMPUTE_BIT_NVX + }; + + using ObjectEntryUsageFlagsNVX = Flags; + + VULKAN_HPP_INLINE ObjectEntryUsageFlagsNVX operator|( ObjectEntryUsageFlagBitsNVX bit0, ObjectEntryUsageFlagBitsNVX bit1 ) + { + return ObjectEntryUsageFlagsNVX( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE ObjectEntryUsageFlagsNVX operator~( ObjectEntryUsageFlagBitsNVX bits ) + { + return ~( ObjectEntryUsageFlagsNVX( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(ObjectEntryUsageFlagBitsNVX::eGraphics) | VkFlags(ObjectEntryUsageFlagBitsNVX::eCompute) + }; + }; + + enum class IndirectCommandsTokenTypeNVX + { + ePipeline = VK_INDIRECT_COMMANDS_TOKEN_TYPE_PIPELINE_NVX, + eDescriptorSet = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DESCRIPTOR_SET_NVX, + eIndexBuffer = VK_INDIRECT_COMMANDS_TOKEN_TYPE_INDEX_BUFFER_NVX, + eVertexBuffer = VK_INDIRECT_COMMANDS_TOKEN_TYPE_VERTEX_BUFFER_NVX, + ePushConstant = VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_CONSTANT_NVX, + eDrawIndexed = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_NVX, + eDraw = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_NVX, + eDispatch = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DISPATCH_NVX + }; + + struct IndirectCommandsTokenNVX + { + IndirectCommandsTokenNVX( IndirectCommandsTokenTypeNVX tokenType_ = IndirectCommandsTokenTypeNVX::ePipeline, + Buffer buffer_ = Buffer(), + DeviceSize offset_ = 0 ) + : tokenType( tokenType_ ) + , buffer( buffer_ ) + , offset( offset_ ) + { + } + + IndirectCommandsTokenNVX( VkIndirectCommandsTokenNVX const & rhs ) + { + memcpy( this, &rhs, sizeof( IndirectCommandsTokenNVX ) ); + } + + IndirectCommandsTokenNVX& operator=( VkIndirectCommandsTokenNVX const & rhs ) + { + memcpy( this, &rhs, sizeof( IndirectCommandsTokenNVX ) ); + return *this; + } + IndirectCommandsTokenNVX& setTokenType( IndirectCommandsTokenTypeNVX tokenType_ ) + { + tokenType = tokenType_; + return *this; + } + + IndirectCommandsTokenNVX& setBuffer( Buffer buffer_ ) + { + buffer = buffer_; + return *this; + } + + IndirectCommandsTokenNVX& setOffset( DeviceSize offset_ ) + { + offset = offset_; + return *this; + } + + operator VkIndirectCommandsTokenNVX const&() const + { + return *reinterpret_cast(this); + } + + operator VkIndirectCommandsTokenNVX &() 
+ { + return *reinterpret_cast(this); + } + + bool operator==( IndirectCommandsTokenNVX const& rhs ) const + { + return ( tokenType == rhs.tokenType ) + && ( buffer == rhs.buffer ) + && ( offset == rhs.offset ); + } + + bool operator!=( IndirectCommandsTokenNVX const& rhs ) const + { + return !operator==( rhs ); + } + + IndirectCommandsTokenTypeNVX tokenType; + Buffer buffer; + DeviceSize offset; + }; + static_assert( sizeof( IndirectCommandsTokenNVX ) == sizeof( VkIndirectCommandsTokenNVX ), "struct and wrapper have different size!" ); + + struct IndirectCommandsLayoutTokenNVX + { + IndirectCommandsLayoutTokenNVX( IndirectCommandsTokenTypeNVX tokenType_ = IndirectCommandsTokenTypeNVX::ePipeline, + uint32_t bindingUnit_ = 0, + uint32_t dynamicCount_ = 0, + uint32_t divisor_ = 0 ) + : tokenType( tokenType_ ) + , bindingUnit( bindingUnit_ ) + , dynamicCount( dynamicCount_ ) + , divisor( divisor_ ) + { + } + + IndirectCommandsLayoutTokenNVX( VkIndirectCommandsLayoutTokenNVX const & rhs ) + { + memcpy( this, &rhs, sizeof( IndirectCommandsLayoutTokenNVX ) ); + } + + IndirectCommandsLayoutTokenNVX& operator=( VkIndirectCommandsLayoutTokenNVX const & rhs ) + { + memcpy( this, &rhs, sizeof( IndirectCommandsLayoutTokenNVX ) ); + return *this; + } + IndirectCommandsLayoutTokenNVX& setTokenType( IndirectCommandsTokenTypeNVX tokenType_ ) + { + tokenType = tokenType_; + return *this; + } + + IndirectCommandsLayoutTokenNVX& setBindingUnit( uint32_t bindingUnit_ ) + { + bindingUnit = bindingUnit_; + return *this; + } + + IndirectCommandsLayoutTokenNVX& setDynamicCount( uint32_t dynamicCount_ ) + { + dynamicCount = dynamicCount_; + return *this; + } + + IndirectCommandsLayoutTokenNVX& setDivisor( uint32_t divisor_ ) + { + divisor = divisor_; + return *this; + } + + operator VkIndirectCommandsLayoutTokenNVX const&() const + { + return *reinterpret_cast(this); + } + + operator VkIndirectCommandsLayoutTokenNVX &() + { + return *reinterpret_cast(this); + } + + bool operator==( IndirectCommandsLayoutTokenNVX const& rhs ) const + { + return ( tokenType == rhs.tokenType ) + && ( bindingUnit == rhs.bindingUnit ) + && ( dynamicCount == rhs.dynamicCount ) + && ( divisor == rhs.divisor ); + } + + bool operator!=( IndirectCommandsLayoutTokenNVX const& rhs ) const + { + return !operator==( rhs ); + } + + IndirectCommandsTokenTypeNVX tokenType; + uint32_t bindingUnit; + uint32_t dynamicCount; + uint32_t divisor; + }; + static_assert( sizeof( IndirectCommandsLayoutTokenNVX ) == sizeof( VkIndirectCommandsLayoutTokenNVX ), "struct and wrapper have different size!" 
); + + struct IndirectCommandsLayoutCreateInfoNVX + { + IndirectCommandsLayoutCreateInfoNVX( PipelineBindPoint pipelineBindPoint_ = PipelineBindPoint::eGraphics, + IndirectCommandsLayoutUsageFlagsNVX flags_ = IndirectCommandsLayoutUsageFlagsNVX(), + uint32_t tokenCount_ = 0, + const IndirectCommandsLayoutTokenNVX* pTokens_ = nullptr ) + : pipelineBindPoint( pipelineBindPoint_ ) + , flags( flags_ ) + , tokenCount( tokenCount_ ) + , pTokens( pTokens_ ) + { + } + + IndirectCommandsLayoutCreateInfoNVX( VkIndirectCommandsLayoutCreateInfoNVX const & rhs ) + { + memcpy( this, &rhs, sizeof( IndirectCommandsLayoutCreateInfoNVX ) ); + } + + IndirectCommandsLayoutCreateInfoNVX& operator=( VkIndirectCommandsLayoutCreateInfoNVX const & rhs ) + { + memcpy( this, &rhs, sizeof( IndirectCommandsLayoutCreateInfoNVX ) ); + return *this; + } + IndirectCommandsLayoutCreateInfoNVX& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + IndirectCommandsLayoutCreateInfoNVX& setPipelineBindPoint( PipelineBindPoint pipelineBindPoint_ ) + { + pipelineBindPoint = pipelineBindPoint_; + return *this; + } + + IndirectCommandsLayoutCreateInfoNVX& setFlags( IndirectCommandsLayoutUsageFlagsNVX flags_ ) + { + flags = flags_; + return *this; + } + + IndirectCommandsLayoutCreateInfoNVX& setTokenCount( uint32_t tokenCount_ ) + { + tokenCount = tokenCount_; + return *this; + } + + IndirectCommandsLayoutCreateInfoNVX& setPTokens( const IndirectCommandsLayoutTokenNVX* pTokens_ ) + { + pTokens = pTokens_; + return *this; + } + + operator VkIndirectCommandsLayoutCreateInfoNVX const&() const + { + return *reinterpret_cast(this); + } + + operator VkIndirectCommandsLayoutCreateInfoNVX &() + { + return *reinterpret_cast(this); + } + + bool operator==( IndirectCommandsLayoutCreateInfoNVX const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( pipelineBindPoint == rhs.pipelineBindPoint ) + && ( flags == rhs.flags ) + && ( tokenCount == rhs.tokenCount ) + && ( pTokens == rhs.pTokens ); + } + + bool operator!=( IndirectCommandsLayoutCreateInfoNVX const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eIndirectCommandsLayoutCreateInfoNVX; + + public: + const void* pNext = nullptr; + PipelineBindPoint pipelineBindPoint; + IndirectCommandsLayoutUsageFlagsNVX flags; + uint32_t tokenCount; + const IndirectCommandsLayoutTokenNVX* pTokens; + }; + static_assert( sizeof( IndirectCommandsLayoutCreateInfoNVX ) == sizeof( VkIndirectCommandsLayoutCreateInfoNVX ), "struct and wrapper have different size!" 
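// Illustrative sketch (not part of this header): describing an indirect-commands layout for
// VK_NVX_device_generated_commands with the setters above. The token array is supplied by the
// caller, and handing the result to a createIndirectCommandsLayoutNVX call is an assumption
// about the rest of the API.
inline vk::IndirectCommandsLayoutCreateInfoNVX makeIndirectCommandsLayoutInfo( const vk::IndirectCommandsLayoutTokenNVX* tokens, uint32_t tokenCount )
{
  return vk::IndirectCommandsLayoutCreateInfoNVX()
    .setPipelineBindPoint( vk::PipelineBindPoint::eGraphics )
    .setFlags( vk::IndirectCommandsLayoutUsageFlagBitsNVX::eUnorderedSequences )
    .setTokenCount( tokenCount )
    .setPTokens( tokens );
}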
); + + enum class ObjectEntryTypeNVX + { + eDescriptorSet = VK_OBJECT_ENTRY_TYPE_DESCRIPTOR_SET_NVX, + ePipeline = VK_OBJECT_ENTRY_TYPE_PIPELINE_NVX, + eIndexBuffer = VK_OBJECT_ENTRY_TYPE_INDEX_BUFFER_NVX, + eVertexBuffer = VK_OBJECT_ENTRY_TYPE_VERTEX_BUFFER_NVX, + ePushConstant = VK_OBJECT_ENTRY_TYPE_PUSH_CONSTANT_NVX + }; + + struct ObjectTableCreateInfoNVX + { + ObjectTableCreateInfoNVX( uint32_t objectCount_ = 0, + const ObjectEntryTypeNVX* pObjectEntryTypes_ = nullptr, + const uint32_t* pObjectEntryCounts_ = nullptr, + const ObjectEntryUsageFlagsNVX* pObjectEntryUsageFlags_ = nullptr, + uint32_t maxUniformBuffersPerDescriptor_ = 0, + uint32_t maxStorageBuffersPerDescriptor_ = 0, + uint32_t maxStorageImagesPerDescriptor_ = 0, + uint32_t maxSampledImagesPerDescriptor_ = 0, + uint32_t maxPipelineLayouts_ = 0 ) + : objectCount( objectCount_ ) + , pObjectEntryTypes( pObjectEntryTypes_ ) + , pObjectEntryCounts( pObjectEntryCounts_ ) + , pObjectEntryUsageFlags( pObjectEntryUsageFlags_ ) + , maxUniformBuffersPerDescriptor( maxUniformBuffersPerDescriptor_ ) + , maxStorageBuffersPerDescriptor( maxStorageBuffersPerDescriptor_ ) + , maxStorageImagesPerDescriptor( maxStorageImagesPerDescriptor_ ) + , maxSampledImagesPerDescriptor( maxSampledImagesPerDescriptor_ ) + , maxPipelineLayouts( maxPipelineLayouts_ ) + { + } + + ObjectTableCreateInfoNVX( VkObjectTableCreateInfoNVX const & rhs ) + { + memcpy( this, &rhs, sizeof( ObjectTableCreateInfoNVX ) ); + } + + ObjectTableCreateInfoNVX& operator=( VkObjectTableCreateInfoNVX const & rhs ) + { + memcpy( this, &rhs, sizeof( ObjectTableCreateInfoNVX ) ); + return *this; + } + ObjectTableCreateInfoNVX& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + ObjectTableCreateInfoNVX& setObjectCount( uint32_t objectCount_ ) + { + objectCount = objectCount_; + return *this; + } + + ObjectTableCreateInfoNVX& setPObjectEntryTypes( const ObjectEntryTypeNVX* pObjectEntryTypes_ ) + { + pObjectEntryTypes = pObjectEntryTypes_; + return *this; + } + + ObjectTableCreateInfoNVX& setPObjectEntryCounts( const uint32_t* pObjectEntryCounts_ ) + { + pObjectEntryCounts = pObjectEntryCounts_; + return *this; + } + + ObjectTableCreateInfoNVX& setPObjectEntryUsageFlags( const ObjectEntryUsageFlagsNVX* pObjectEntryUsageFlags_ ) + { + pObjectEntryUsageFlags = pObjectEntryUsageFlags_; + return *this; + } + + ObjectTableCreateInfoNVX& setMaxUniformBuffersPerDescriptor( uint32_t maxUniformBuffersPerDescriptor_ ) + { + maxUniformBuffersPerDescriptor = maxUniformBuffersPerDescriptor_; + return *this; + } + + ObjectTableCreateInfoNVX& setMaxStorageBuffersPerDescriptor( uint32_t maxStorageBuffersPerDescriptor_ ) + { + maxStorageBuffersPerDescriptor = maxStorageBuffersPerDescriptor_; + return *this; + } + + ObjectTableCreateInfoNVX& setMaxStorageImagesPerDescriptor( uint32_t maxStorageImagesPerDescriptor_ ) + { + maxStorageImagesPerDescriptor = maxStorageImagesPerDescriptor_; + return *this; + } + + ObjectTableCreateInfoNVX& setMaxSampledImagesPerDescriptor( uint32_t maxSampledImagesPerDescriptor_ ) + { + maxSampledImagesPerDescriptor = maxSampledImagesPerDescriptor_; + return *this; + } + + ObjectTableCreateInfoNVX& setMaxPipelineLayouts( uint32_t maxPipelineLayouts_ ) + { + maxPipelineLayouts = maxPipelineLayouts_; + return *this; + } + + operator VkObjectTableCreateInfoNVX const&() const + { + return *reinterpret_cast(this); + } + + operator VkObjectTableCreateInfoNVX &() + { + return *reinterpret_cast(this); + } + + bool operator==( ObjectTableCreateInfoNVX const& 
rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( objectCount == rhs.objectCount ) + && ( pObjectEntryTypes == rhs.pObjectEntryTypes ) + && ( pObjectEntryCounts == rhs.pObjectEntryCounts ) + && ( pObjectEntryUsageFlags == rhs.pObjectEntryUsageFlags ) + && ( maxUniformBuffersPerDescriptor == rhs.maxUniformBuffersPerDescriptor ) + && ( maxStorageBuffersPerDescriptor == rhs.maxStorageBuffersPerDescriptor ) + && ( maxStorageImagesPerDescriptor == rhs.maxStorageImagesPerDescriptor ) + && ( maxSampledImagesPerDescriptor == rhs.maxSampledImagesPerDescriptor ) + && ( maxPipelineLayouts == rhs.maxPipelineLayouts ); + } + + bool operator!=( ObjectTableCreateInfoNVX const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eObjectTableCreateInfoNVX; + + public: + const void* pNext = nullptr; + uint32_t objectCount; + const ObjectEntryTypeNVX* pObjectEntryTypes; + const uint32_t* pObjectEntryCounts; + const ObjectEntryUsageFlagsNVX* pObjectEntryUsageFlags; + uint32_t maxUniformBuffersPerDescriptor; + uint32_t maxStorageBuffersPerDescriptor; + uint32_t maxStorageImagesPerDescriptor; + uint32_t maxSampledImagesPerDescriptor; + uint32_t maxPipelineLayouts; + }; + static_assert( sizeof( ObjectTableCreateInfoNVX ) == sizeof( VkObjectTableCreateInfoNVX ), "struct and wrapper have different size!" ); + + struct ObjectTableEntryNVX + { + ObjectTableEntryNVX( ObjectEntryTypeNVX type_ = ObjectEntryTypeNVX::eDescriptorSet, + ObjectEntryUsageFlagsNVX flags_ = ObjectEntryUsageFlagsNVX() ) + : type( type_ ) + , flags( flags_ ) + { + } + + ObjectTableEntryNVX( VkObjectTableEntryNVX const & rhs ) + { + memcpy( this, &rhs, sizeof( ObjectTableEntryNVX ) ); + } + + ObjectTableEntryNVX& operator=( VkObjectTableEntryNVX const & rhs ) + { + memcpy( this, &rhs, sizeof( ObjectTableEntryNVX ) ); + return *this; + } + ObjectTableEntryNVX& setType( ObjectEntryTypeNVX type_ ) + { + type = type_; + return *this; + } + + ObjectTableEntryNVX& setFlags( ObjectEntryUsageFlagsNVX flags_ ) + { + flags = flags_; + return *this; + } + + operator VkObjectTableEntryNVX const&() const + { + return *reinterpret_cast(this); + } + + operator VkObjectTableEntryNVX &() + { + return *reinterpret_cast(this); + } + + bool operator==( ObjectTableEntryNVX const& rhs ) const + { + return ( type == rhs.type ) + && ( flags == rhs.flags ); + } + + bool operator!=( ObjectTableEntryNVX const& rhs ) const + { + return !operator==( rhs ); + } + + ObjectEntryTypeNVX type; + ObjectEntryUsageFlagsNVX flags; + }; + static_assert( sizeof( ObjectTableEntryNVX ) == sizeof( VkObjectTableEntryNVX ), "struct and wrapper have different size!" 
); + + struct ObjectTablePipelineEntryNVX + { + ObjectTablePipelineEntryNVX( ObjectEntryTypeNVX type_ = ObjectEntryTypeNVX::eDescriptorSet, + ObjectEntryUsageFlagsNVX flags_ = ObjectEntryUsageFlagsNVX(), + Pipeline pipeline_ = Pipeline() ) + : type( type_ ) + , flags( flags_ ) + , pipeline( pipeline_ ) + { + } + + explicit ObjectTablePipelineEntryNVX( ObjectTableEntryNVX const& objectTableEntryNVX, + Pipeline pipeline_ = Pipeline() ) + : type( objectTableEntryNVX.type ) + , flags( objectTableEntryNVX.flags ) + , pipeline( pipeline_ ) + {} + + ObjectTablePipelineEntryNVX( VkObjectTablePipelineEntryNVX const & rhs ) + { + memcpy( this, &rhs, sizeof( ObjectTablePipelineEntryNVX ) ); + } + + ObjectTablePipelineEntryNVX& operator=( VkObjectTablePipelineEntryNVX const & rhs ) + { + memcpy( this, &rhs, sizeof( ObjectTablePipelineEntryNVX ) ); + return *this; + } + ObjectTablePipelineEntryNVX& setType( ObjectEntryTypeNVX type_ ) + { + type = type_; + return *this; + } + + ObjectTablePipelineEntryNVX& setFlags( ObjectEntryUsageFlagsNVX flags_ ) + { + flags = flags_; + return *this; + } + + ObjectTablePipelineEntryNVX& setPipeline( Pipeline pipeline_ ) + { + pipeline = pipeline_; + return *this; + } + + operator VkObjectTablePipelineEntryNVX const&() const + { + return *reinterpret_cast(this); + } + + operator VkObjectTablePipelineEntryNVX &() + { + return *reinterpret_cast(this); + } + + bool operator==( ObjectTablePipelineEntryNVX const& rhs ) const + { + return ( type == rhs.type ) + && ( flags == rhs.flags ) + && ( pipeline == rhs.pipeline ); + } + + bool operator!=( ObjectTablePipelineEntryNVX const& rhs ) const + { + return !operator==( rhs ); + } + + ObjectEntryTypeNVX type; + ObjectEntryUsageFlagsNVX flags; + Pipeline pipeline; + }; + static_assert( sizeof( ObjectTablePipelineEntryNVX ) == sizeof( VkObjectTablePipelineEntryNVX ), "struct and wrapper have different size!" 
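// Illustrative sketch (not part of this header): the explicit constructor above promotes a
// generic ObjectTableEntryNVX to a typed pipeline entry; "pipeline" is an assumed,
// already-created handle.
inline vk::ObjectTablePipelineEntryNVX makePipelineEntry( vk::Pipeline pipeline )
{
  vk::ObjectTableEntryNVX base( vk::ObjectEntryTypeNVX::ePipeline,
                                vk::ObjectEntryUsageFlagBitsNVX::eGraphics );
  return vk::ObjectTablePipelineEntryNVX( base, pipeline );
}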
); + + struct ObjectTableDescriptorSetEntryNVX + { + ObjectTableDescriptorSetEntryNVX( ObjectEntryTypeNVX type_ = ObjectEntryTypeNVX::eDescriptorSet, + ObjectEntryUsageFlagsNVX flags_ = ObjectEntryUsageFlagsNVX(), + PipelineLayout pipelineLayout_ = PipelineLayout(), + DescriptorSet descriptorSet_ = DescriptorSet() ) + : type( type_ ) + , flags( flags_ ) + , pipelineLayout( pipelineLayout_ ) + , descriptorSet( descriptorSet_ ) + { + } + + explicit ObjectTableDescriptorSetEntryNVX( ObjectTableEntryNVX const& objectTableEntryNVX, + PipelineLayout pipelineLayout_ = PipelineLayout(), + DescriptorSet descriptorSet_ = DescriptorSet() ) + : type( objectTableEntryNVX.type ) + , flags( objectTableEntryNVX.flags ) + , pipelineLayout( pipelineLayout_ ) + , descriptorSet( descriptorSet_ ) + {} + + ObjectTableDescriptorSetEntryNVX( VkObjectTableDescriptorSetEntryNVX const & rhs ) + { + memcpy( this, &rhs, sizeof( ObjectTableDescriptorSetEntryNVX ) ); + } + + ObjectTableDescriptorSetEntryNVX& operator=( VkObjectTableDescriptorSetEntryNVX const & rhs ) + { + memcpy( this, &rhs, sizeof( ObjectTableDescriptorSetEntryNVX ) ); + return *this; + } + ObjectTableDescriptorSetEntryNVX& setType( ObjectEntryTypeNVX type_ ) + { + type = type_; + return *this; + } + + ObjectTableDescriptorSetEntryNVX& setFlags( ObjectEntryUsageFlagsNVX flags_ ) + { + flags = flags_; + return *this; + } + + ObjectTableDescriptorSetEntryNVX& setPipelineLayout( PipelineLayout pipelineLayout_ ) + { + pipelineLayout = pipelineLayout_; + return *this; + } + + ObjectTableDescriptorSetEntryNVX& setDescriptorSet( DescriptorSet descriptorSet_ ) + { + descriptorSet = descriptorSet_; + return *this; + } + + operator VkObjectTableDescriptorSetEntryNVX const&() const + { + return *reinterpret_cast(this); + } + + operator VkObjectTableDescriptorSetEntryNVX &() + { + return *reinterpret_cast(this); + } + + bool operator==( ObjectTableDescriptorSetEntryNVX const& rhs ) const + { + return ( type == rhs.type ) + && ( flags == rhs.flags ) + && ( pipelineLayout == rhs.pipelineLayout ) + && ( descriptorSet == rhs.descriptorSet ); + } + + bool operator!=( ObjectTableDescriptorSetEntryNVX const& rhs ) const + { + return !operator==( rhs ); + } + + ObjectEntryTypeNVX type; + ObjectEntryUsageFlagsNVX flags; + PipelineLayout pipelineLayout; + DescriptorSet descriptorSet; + }; + static_assert( sizeof( ObjectTableDescriptorSetEntryNVX ) == sizeof( VkObjectTableDescriptorSetEntryNVX ), "struct and wrapper have different size!" 
); + + struct ObjectTableVertexBufferEntryNVX + { + ObjectTableVertexBufferEntryNVX( ObjectEntryTypeNVX type_ = ObjectEntryTypeNVX::eDescriptorSet, + ObjectEntryUsageFlagsNVX flags_ = ObjectEntryUsageFlagsNVX(), + Buffer buffer_ = Buffer() ) + : type( type_ ) + , flags( flags_ ) + , buffer( buffer_ ) + { + } + + explicit ObjectTableVertexBufferEntryNVX( ObjectTableEntryNVX const& objectTableEntryNVX, + Buffer buffer_ = Buffer() ) + : type( objectTableEntryNVX.type ) + , flags( objectTableEntryNVX.flags ) + , buffer( buffer_ ) + {} + + ObjectTableVertexBufferEntryNVX( VkObjectTableVertexBufferEntryNVX const & rhs ) + { + memcpy( this, &rhs, sizeof( ObjectTableVertexBufferEntryNVX ) ); + } + + ObjectTableVertexBufferEntryNVX& operator=( VkObjectTableVertexBufferEntryNVX const & rhs ) + { + memcpy( this, &rhs, sizeof( ObjectTableVertexBufferEntryNVX ) ); + return *this; + } + ObjectTableVertexBufferEntryNVX& setType( ObjectEntryTypeNVX type_ ) + { + type = type_; + return *this; + } + + ObjectTableVertexBufferEntryNVX& setFlags( ObjectEntryUsageFlagsNVX flags_ ) + { + flags = flags_; + return *this; + } + + ObjectTableVertexBufferEntryNVX& setBuffer( Buffer buffer_ ) + { + buffer = buffer_; + return *this; + } + + operator VkObjectTableVertexBufferEntryNVX const&() const + { + return *reinterpret_cast(this); + } + + operator VkObjectTableVertexBufferEntryNVX &() + { + return *reinterpret_cast(this); + } + + bool operator==( ObjectTableVertexBufferEntryNVX const& rhs ) const + { + return ( type == rhs.type ) + && ( flags == rhs.flags ) + && ( buffer == rhs.buffer ); + } + + bool operator!=( ObjectTableVertexBufferEntryNVX const& rhs ) const + { + return !operator==( rhs ); + } + + ObjectEntryTypeNVX type; + ObjectEntryUsageFlagsNVX flags; + Buffer buffer; + }; + static_assert( sizeof( ObjectTableVertexBufferEntryNVX ) == sizeof( VkObjectTableVertexBufferEntryNVX ), "struct and wrapper have different size!" 
); + + struct ObjectTableIndexBufferEntryNVX + { + ObjectTableIndexBufferEntryNVX( ObjectEntryTypeNVX type_ = ObjectEntryTypeNVX::eDescriptorSet, + ObjectEntryUsageFlagsNVX flags_ = ObjectEntryUsageFlagsNVX(), + Buffer buffer_ = Buffer(), + IndexType indexType_ = IndexType::eUint16 ) + : type( type_ ) + , flags( flags_ ) + , buffer( buffer_ ) + , indexType( indexType_ ) + { + } + + explicit ObjectTableIndexBufferEntryNVX( ObjectTableEntryNVX const& objectTableEntryNVX, + Buffer buffer_ = Buffer(), + IndexType indexType_ = IndexType::eUint16 ) + : type( objectTableEntryNVX.type ) + , flags( objectTableEntryNVX.flags ) + , buffer( buffer_ ) + , indexType( indexType_ ) + {} + + ObjectTableIndexBufferEntryNVX( VkObjectTableIndexBufferEntryNVX const & rhs ) + { + memcpy( this, &rhs, sizeof( ObjectTableIndexBufferEntryNVX ) ); + } + + ObjectTableIndexBufferEntryNVX& operator=( VkObjectTableIndexBufferEntryNVX const & rhs ) + { + memcpy( this, &rhs, sizeof( ObjectTableIndexBufferEntryNVX ) ); + return *this; + } + ObjectTableIndexBufferEntryNVX& setType( ObjectEntryTypeNVX type_ ) + { + type = type_; + return *this; + } + + ObjectTableIndexBufferEntryNVX& setFlags( ObjectEntryUsageFlagsNVX flags_ ) + { + flags = flags_; + return *this; + } + + ObjectTableIndexBufferEntryNVX& setBuffer( Buffer buffer_ ) + { + buffer = buffer_; + return *this; + } + + ObjectTableIndexBufferEntryNVX& setIndexType( IndexType indexType_ ) + { + indexType = indexType_; + return *this; + } + + operator VkObjectTableIndexBufferEntryNVX const&() const + { + return *reinterpret_cast(this); + } + + operator VkObjectTableIndexBufferEntryNVX &() + { + return *reinterpret_cast(this); + } + + bool operator==( ObjectTableIndexBufferEntryNVX const& rhs ) const + { + return ( type == rhs.type ) + && ( flags == rhs.flags ) + && ( buffer == rhs.buffer ) + && ( indexType == rhs.indexType ); + } + + bool operator!=( ObjectTableIndexBufferEntryNVX const& rhs ) const + { + return !operator==( rhs ); + } + + ObjectEntryTypeNVX type; + ObjectEntryUsageFlagsNVX flags; + Buffer buffer; + IndexType indexType; + }; + static_assert( sizeof( ObjectTableIndexBufferEntryNVX ) == sizeof( VkObjectTableIndexBufferEntryNVX ), "struct and wrapper have different size!" 
); + + struct ObjectTablePushConstantEntryNVX + { + ObjectTablePushConstantEntryNVX( ObjectEntryTypeNVX type_ = ObjectEntryTypeNVX::eDescriptorSet, + ObjectEntryUsageFlagsNVX flags_ = ObjectEntryUsageFlagsNVX(), + PipelineLayout pipelineLayout_ = PipelineLayout(), + ShaderStageFlags stageFlags_ = ShaderStageFlags() ) + : type( type_ ) + , flags( flags_ ) + , pipelineLayout( pipelineLayout_ ) + , stageFlags( stageFlags_ ) + { + } + + explicit ObjectTablePushConstantEntryNVX( ObjectTableEntryNVX const& objectTableEntryNVX, + PipelineLayout pipelineLayout_ = PipelineLayout(), + ShaderStageFlags stageFlags_ = ShaderStageFlags() ) + : type( objectTableEntryNVX.type ) + , flags( objectTableEntryNVX.flags ) + , pipelineLayout( pipelineLayout_ ) + , stageFlags( stageFlags_ ) + {} + + ObjectTablePushConstantEntryNVX( VkObjectTablePushConstantEntryNVX const & rhs ) + { + memcpy( this, &rhs, sizeof( ObjectTablePushConstantEntryNVX ) ); + } + + ObjectTablePushConstantEntryNVX& operator=( VkObjectTablePushConstantEntryNVX const & rhs ) + { + memcpy( this, &rhs, sizeof( ObjectTablePushConstantEntryNVX ) ); + return *this; + } + ObjectTablePushConstantEntryNVX& setType( ObjectEntryTypeNVX type_ ) + { + type = type_; + return *this; + } + + ObjectTablePushConstantEntryNVX& setFlags( ObjectEntryUsageFlagsNVX flags_ ) + { + flags = flags_; + return *this; + } + + ObjectTablePushConstantEntryNVX& setPipelineLayout( PipelineLayout pipelineLayout_ ) + { + pipelineLayout = pipelineLayout_; + return *this; + } + + ObjectTablePushConstantEntryNVX& setStageFlags( ShaderStageFlags stageFlags_ ) + { + stageFlags = stageFlags_; + return *this; + } + + operator VkObjectTablePushConstantEntryNVX const&() const + { + return *reinterpret_cast(this); + } + + operator VkObjectTablePushConstantEntryNVX &() + { + return *reinterpret_cast(this); + } + + bool operator==( ObjectTablePushConstantEntryNVX const& rhs ) const + { + return ( type == rhs.type ) + && ( flags == rhs.flags ) + && ( pipelineLayout == rhs.pipelineLayout ) + && ( stageFlags == rhs.stageFlags ); + } + + bool operator!=( ObjectTablePushConstantEntryNVX const& rhs ) const + { + return !operator==( rhs ); + } + + ObjectEntryTypeNVX type; + ObjectEntryUsageFlagsNVX flags; + PipelineLayout pipelineLayout; + ShaderStageFlags stageFlags; + }; + static_assert( sizeof( ObjectTablePushConstantEntryNVX ) == sizeof( VkObjectTablePushConstantEntryNVX ), "struct and wrapper have different size!" 
); + + enum class DescriptorSetLayoutCreateFlagBits + { + ePushDescriptorKHR = VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR, + eUpdateAfterBindPoolEXT = VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT + }; + + using DescriptorSetLayoutCreateFlags = Flags; + + VULKAN_HPP_INLINE DescriptorSetLayoutCreateFlags operator|( DescriptorSetLayoutCreateFlagBits bit0, DescriptorSetLayoutCreateFlagBits bit1 ) + { + return DescriptorSetLayoutCreateFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE DescriptorSetLayoutCreateFlags operator~( DescriptorSetLayoutCreateFlagBits bits ) + { + return ~( DescriptorSetLayoutCreateFlags( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(DescriptorSetLayoutCreateFlagBits::ePushDescriptorKHR) | VkFlags(DescriptorSetLayoutCreateFlagBits::eUpdateAfterBindPoolEXT) + }; + }; + + struct DescriptorSetLayoutCreateInfo + { + DescriptorSetLayoutCreateInfo( DescriptorSetLayoutCreateFlags flags_ = DescriptorSetLayoutCreateFlags(), + uint32_t bindingCount_ = 0, + const DescriptorSetLayoutBinding* pBindings_ = nullptr ) + : flags( flags_ ) + , bindingCount( bindingCount_ ) + , pBindings( pBindings_ ) + { + } + + DescriptorSetLayoutCreateInfo( VkDescriptorSetLayoutCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( DescriptorSetLayoutCreateInfo ) ); + } + + DescriptorSetLayoutCreateInfo& operator=( VkDescriptorSetLayoutCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( DescriptorSetLayoutCreateInfo ) ); + return *this; + } + DescriptorSetLayoutCreateInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + DescriptorSetLayoutCreateInfo& setFlags( DescriptorSetLayoutCreateFlags flags_ ) + { + flags = flags_; + return *this; + } + + DescriptorSetLayoutCreateInfo& setBindingCount( uint32_t bindingCount_ ) + { + bindingCount = bindingCount_; + return *this; + } + + DescriptorSetLayoutCreateInfo& setPBindings( const DescriptorSetLayoutBinding* pBindings_ ) + { + pBindings = pBindings_; + return *this; + } + + operator VkDescriptorSetLayoutCreateInfo const&() const + { + return *reinterpret_cast(this); + } + + operator VkDescriptorSetLayoutCreateInfo &() + { + return *reinterpret_cast(this); + } + + bool operator==( DescriptorSetLayoutCreateInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( bindingCount == rhs.bindingCount ) + && ( pBindings == rhs.pBindings ); + } + + bool operator!=( DescriptorSetLayoutCreateInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eDescriptorSetLayoutCreateInfo; + + public: + const void* pNext = nullptr; + DescriptorSetLayoutCreateFlags flags; + uint32_t bindingCount; + const DescriptorSetLayoutBinding* pBindings; + }; + static_assert( sizeof( DescriptorSetLayoutCreateInfo ) == sizeof( VkDescriptorSetLayoutCreateInfo ), "struct and wrapper have different size!" 
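// Illustrative sketch (not part of this header): describing a single-binding push-descriptor
// set layout. DescriptorSetLayoutBinding is declared earlier in this header, and the
// createDescriptorSetLayout call that would consume the result is an assumption about the
// rest of the API.
inline vk::DescriptorSetLayoutCreateInfo makeLayoutInfo( const vk::DescriptorSetLayoutBinding* binding )
{
  return vk::DescriptorSetLayoutCreateInfo()
    .setFlags( vk::DescriptorSetLayoutCreateFlagBits::ePushDescriptorKHR )
    .setBindingCount( 1 )
    .setPBindings( binding );
}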
); + + enum class ExternalMemoryHandleTypeFlagBits + { + eOpaqueFd = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT, + eOpaqueFdKHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT, + eOpaqueWin32 = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT, + eOpaqueWin32KHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT, + eOpaqueWin32Kmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT, + eOpaqueWin32KmtKHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT, + eD3D11Texture = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT, + eD3D11TextureKHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT, + eD3D11TextureKmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT, + eD3D11TextureKmtKHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT, + eD3D12Heap = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT, + eD3D12HeapKHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT, + eD3D12Resource = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT, + eD3D12ResourceKHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT, + eDmaBufEXT = VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT, + eAndroidHardwareBufferANDROID = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID, + eHostAllocationEXT = VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT, + eHostMappedForeignMemoryEXT = VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_MAPPED_FOREIGN_MEMORY_BIT_EXT + }; + + using ExternalMemoryHandleTypeFlags = Flags; + + VULKAN_HPP_INLINE ExternalMemoryHandleTypeFlags operator|( ExternalMemoryHandleTypeFlagBits bit0, ExternalMemoryHandleTypeFlagBits bit1 ) + { + return ExternalMemoryHandleTypeFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE ExternalMemoryHandleTypeFlags operator~( ExternalMemoryHandleTypeFlagBits bits ) + { + return ~( ExternalMemoryHandleTypeFlags( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(ExternalMemoryHandleTypeFlagBits::eOpaqueFd) | VkFlags(ExternalMemoryHandleTypeFlagBits::eOpaqueWin32) | VkFlags(ExternalMemoryHandleTypeFlagBits::eOpaqueWin32Kmt) | VkFlags(ExternalMemoryHandleTypeFlagBits::eD3D11Texture) | VkFlags(ExternalMemoryHandleTypeFlagBits::eD3D11TextureKmt) | VkFlags(ExternalMemoryHandleTypeFlagBits::eD3D12Heap) | VkFlags(ExternalMemoryHandleTypeFlagBits::eD3D12Resource) | VkFlags(ExternalMemoryHandleTypeFlagBits::eDmaBufEXT) | VkFlags(ExternalMemoryHandleTypeFlagBits::eAndroidHardwareBufferANDROID) | VkFlags(ExternalMemoryHandleTypeFlagBits::eHostAllocationEXT) | VkFlags(ExternalMemoryHandleTypeFlagBits::eHostMappedForeignMemoryEXT) + }; + }; + + using ExternalMemoryHandleTypeFlagsKHR = ExternalMemoryHandleTypeFlags; + + struct PhysicalDeviceExternalImageFormatInfo + { + PhysicalDeviceExternalImageFormatInfo( ExternalMemoryHandleTypeFlagBits handleType_ = ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) + : handleType( handleType_ ) + { + } + + PhysicalDeviceExternalImageFormatInfo( VkPhysicalDeviceExternalImageFormatInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceExternalImageFormatInfo ) ); + } + + PhysicalDeviceExternalImageFormatInfo& operator=( VkPhysicalDeviceExternalImageFormatInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceExternalImageFormatInfo ) ); + return *this; + } + PhysicalDeviceExternalImageFormatInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceExternalImageFormatInfo& setHandleType( ExternalMemoryHandleTypeFlagBits handleType_ ) + { + handleType = handleType_; + return *this; + } + + operator VkPhysicalDeviceExternalImageFormatInfo const&() 
const + { + return *reinterpret_cast(this); + } + + operator VkPhysicalDeviceExternalImageFormatInfo &() + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDeviceExternalImageFormatInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( handleType == rhs.handleType ); + } + + bool operator!=( PhysicalDeviceExternalImageFormatInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePhysicalDeviceExternalImageFormatInfo; + + public: + const void* pNext = nullptr; + ExternalMemoryHandleTypeFlagBits handleType; + }; + static_assert( sizeof( PhysicalDeviceExternalImageFormatInfo ) == sizeof( VkPhysicalDeviceExternalImageFormatInfo ), "struct and wrapper have different size!" ); + + using PhysicalDeviceExternalImageFormatInfoKHR = PhysicalDeviceExternalImageFormatInfo; + + struct PhysicalDeviceExternalBufferInfo + { + PhysicalDeviceExternalBufferInfo( BufferCreateFlags flags_ = BufferCreateFlags(), + BufferUsageFlags usage_ = BufferUsageFlags(), + ExternalMemoryHandleTypeFlagBits handleType_ = ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) + : flags( flags_ ) + , usage( usage_ ) + , handleType( handleType_ ) + { + } + + PhysicalDeviceExternalBufferInfo( VkPhysicalDeviceExternalBufferInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceExternalBufferInfo ) ); + } + + PhysicalDeviceExternalBufferInfo& operator=( VkPhysicalDeviceExternalBufferInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceExternalBufferInfo ) ); + return *this; + } + PhysicalDeviceExternalBufferInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceExternalBufferInfo& setFlags( BufferCreateFlags flags_ ) + { + flags = flags_; + return *this; + } + + PhysicalDeviceExternalBufferInfo& setUsage( BufferUsageFlags usage_ ) + { + usage = usage_; + return *this; + } + + PhysicalDeviceExternalBufferInfo& setHandleType( ExternalMemoryHandleTypeFlagBits handleType_ ) + { + handleType = handleType_; + return *this; + } + + operator VkPhysicalDeviceExternalBufferInfo const&() const + { + return *reinterpret_cast(this); + } + + operator VkPhysicalDeviceExternalBufferInfo &() + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDeviceExternalBufferInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( usage == rhs.usage ) + && ( handleType == rhs.handleType ); + } + + bool operator!=( PhysicalDeviceExternalBufferInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePhysicalDeviceExternalBufferInfo; + + public: + const void* pNext = nullptr; + BufferCreateFlags flags; + BufferUsageFlags usage; + ExternalMemoryHandleTypeFlagBits handleType; + }; + static_assert( sizeof( PhysicalDeviceExternalBufferInfo ) == sizeof( VkPhysicalDeviceExternalBufferInfo ), "struct and wrapper have different size!" 
); + + using PhysicalDeviceExternalBufferInfoKHR = PhysicalDeviceExternalBufferInfo; + + struct ExternalMemoryImageCreateInfo + { + ExternalMemoryImageCreateInfo( ExternalMemoryHandleTypeFlags handleTypes_ = ExternalMemoryHandleTypeFlags() ) + : handleTypes( handleTypes_ ) + { + } + + ExternalMemoryImageCreateInfo( VkExternalMemoryImageCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( ExternalMemoryImageCreateInfo ) ); + } + + ExternalMemoryImageCreateInfo& operator=( VkExternalMemoryImageCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( ExternalMemoryImageCreateInfo ) ); + return *this; + } + ExternalMemoryImageCreateInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + ExternalMemoryImageCreateInfo& setHandleTypes( ExternalMemoryHandleTypeFlags handleTypes_ ) + { + handleTypes = handleTypes_; + return *this; + } + + operator VkExternalMemoryImageCreateInfo const&() const + { + return *reinterpret_cast(this); + } + + operator VkExternalMemoryImageCreateInfo &() + { + return *reinterpret_cast(this); + } + + bool operator==( ExternalMemoryImageCreateInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( handleTypes == rhs.handleTypes ); + } + + bool operator!=( ExternalMemoryImageCreateInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eExternalMemoryImageCreateInfo; + + public: + const void* pNext = nullptr; + ExternalMemoryHandleTypeFlags handleTypes; + }; + static_assert( sizeof( ExternalMemoryImageCreateInfo ) == sizeof( VkExternalMemoryImageCreateInfo ), "struct and wrapper have different size!" ); + + using ExternalMemoryImageCreateInfoKHR = ExternalMemoryImageCreateInfo; + + struct ExternalMemoryBufferCreateInfo + { + ExternalMemoryBufferCreateInfo( ExternalMemoryHandleTypeFlags handleTypes_ = ExternalMemoryHandleTypeFlags() ) + : handleTypes( handleTypes_ ) + { + } + + ExternalMemoryBufferCreateInfo( VkExternalMemoryBufferCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( ExternalMemoryBufferCreateInfo ) ); + } + + ExternalMemoryBufferCreateInfo& operator=( VkExternalMemoryBufferCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( ExternalMemoryBufferCreateInfo ) ); + return *this; + } + ExternalMemoryBufferCreateInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + ExternalMemoryBufferCreateInfo& setHandleTypes( ExternalMemoryHandleTypeFlags handleTypes_ ) + { + handleTypes = handleTypes_; + return *this; + } + + operator VkExternalMemoryBufferCreateInfo const&() const + { + return *reinterpret_cast(this); + } + + operator VkExternalMemoryBufferCreateInfo &() + { + return *reinterpret_cast(this); + } + + bool operator==( ExternalMemoryBufferCreateInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( handleTypes == rhs.handleTypes ); + } + + bool operator!=( ExternalMemoryBufferCreateInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eExternalMemoryBufferCreateInfo; + + public: + const void* pNext = nullptr; + ExternalMemoryHandleTypeFlags handleTypes; + }; + static_assert( sizeof( ExternalMemoryBufferCreateInfo ) == sizeof( VkExternalMemoryBufferCreateInfo ), "struct and wrapper have different size!" 
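// Illustrative sketch (not part of this header): requesting an exportable image by chaining
// ExternalMemoryImageCreateInfo into ImageCreateInfo::pNext. ImageCreateInfo is declared
// earlier in this header; filling in its remaining fields and the actual createImage call
// are left out.
inline void markImageExportable( vk::ImageCreateInfo& imageInfo, vk::ExternalMemoryImageCreateInfo& externalInfo )
{
  externalInfo.setHandleTypes( vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd );
  externalInfo.setPNext( imageInfo.pNext );  // keep any structs already chained
  imageInfo.setPNext( &externalInfo );
}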
); + + using ExternalMemoryBufferCreateInfoKHR = ExternalMemoryBufferCreateInfo; + + struct ExportMemoryAllocateInfo + { + ExportMemoryAllocateInfo( ExternalMemoryHandleTypeFlags handleTypes_ = ExternalMemoryHandleTypeFlags() ) + : handleTypes( handleTypes_ ) + { + } + + ExportMemoryAllocateInfo( VkExportMemoryAllocateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( ExportMemoryAllocateInfo ) ); + } + + ExportMemoryAllocateInfo& operator=( VkExportMemoryAllocateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( ExportMemoryAllocateInfo ) ); + return *this; + } + ExportMemoryAllocateInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + ExportMemoryAllocateInfo& setHandleTypes( ExternalMemoryHandleTypeFlags handleTypes_ ) + { + handleTypes = handleTypes_; + return *this; + } + + operator VkExportMemoryAllocateInfo const&() const + { + return *reinterpret_cast(this); + } + + operator VkExportMemoryAllocateInfo &() + { + return *reinterpret_cast(this); + } + + bool operator==( ExportMemoryAllocateInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( handleTypes == rhs.handleTypes ); + } + + bool operator!=( ExportMemoryAllocateInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eExportMemoryAllocateInfo; + + public: + const void* pNext = nullptr; + ExternalMemoryHandleTypeFlags handleTypes; + }; + static_assert( sizeof( ExportMemoryAllocateInfo ) == sizeof( VkExportMemoryAllocateInfo ), "struct and wrapper have different size!" ); + + using ExportMemoryAllocateInfoKHR = ExportMemoryAllocateInfo; + +#ifdef VK_USE_PLATFORM_WIN32_KHR + struct ImportMemoryWin32HandleInfoKHR + { + ImportMemoryWin32HandleInfoKHR( ExternalMemoryHandleTypeFlagBits handleType_ = ExternalMemoryHandleTypeFlagBits::eOpaqueFd, + HANDLE handle_ = 0, + LPCWSTR name_ = 0 ) + : handleType( handleType_ ) + , handle( handle_ ) + , name( name_ ) + { + } + + ImportMemoryWin32HandleInfoKHR( VkImportMemoryWin32HandleInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( ImportMemoryWin32HandleInfoKHR ) ); + } + + ImportMemoryWin32HandleInfoKHR& operator=( VkImportMemoryWin32HandleInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( ImportMemoryWin32HandleInfoKHR ) ); + return *this; + } + ImportMemoryWin32HandleInfoKHR& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + ImportMemoryWin32HandleInfoKHR& setHandleType( ExternalMemoryHandleTypeFlagBits handleType_ ) + { + handleType = handleType_; + return *this; + } + + ImportMemoryWin32HandleInfoKHR& setHandle( HANDLE handle_ ) + { + handle = handle_; + return *this; + } + + ImportMemoryWin32HandleInfoKHR& setName( LPCWSTR name_ ) + { + name = name_; + return *this; + } + + operator VkImportMemoryWin32HandleInfoKHR const&() const + { + return *reinterpret_cast(this); + } + + operator VkImportMemoryWin32HandleInfoKHR &() + { + return *reinterpret_cast(this); + } + + bool operator==( ImportMemoryWin32HandleInfoKHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( handleType == rhs.handleType ) + && ( handle == rhs.handle ) + && ( name == rhs.name ); + } + + bool operator!=( ImportMemoryWin32HandleInfoKHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eImportMemoryWin32HandleInfoKHR; + + public: + const void* pNext = nullptr; + ExternalMemoryHandleTypeFlagBits handleType; + HANDLE handle; + LPCWSTR name; + }; + 
static_assert( sizeof( ImportMemoryWin32HandleInfoKHR ) == sizeof( VkImportMemoryWin32HandleInfoKHR ), "struct and wrapper have different size!" ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#ifdef VK_USE_PLATFORM_WIN32_KHR + struct MemoryGetWin32HandleInfoKHR + { + MemoryGetWin32HandleInfoKHR( DeviceMemory memory_ = DeviceMemory(), + ExternalMemoryHandleTypeFlagBits handleType_ = ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) + : memory( memory_ ) + , handleType( handleType_ ) + { + } + + MemoryGetWin32HandleInfoKHR( VkMemoryGetWin32HandleInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( MemoryGetWin32HandleInfoKHR ) ); + } + + MemoryGetWin32HandleInfoKHR& operator=( VkMemoryGetWin32HandleInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( MemoryGetWin32HandleInfoKHR ) ); + return *this; + } + MemoryGetWin32HandleInfoKHR& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + MemoryGetWin32HandleInfoKHR& setMemory( DeviceMemory memory_ ) + { + memory = memory_; + return *this; + } + + MemoryGetWin32HandleInfoKHR& setHandleType( ExternalMemoryHandleTypeFlagBits handleType_ ) + { + handleType = handleType_; + return *this; + } + + operator VkMemoryGetWin32HandleInfoKHR const&() const + { + return *reinterpret_cast(this); + } + + operator VkMemoryGetWin32HandleInfoKHR &() + { + return *reinterpret_cast(this); + } + + bool operator==( MemoryGetWin32HandleInfoKHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( memory == rhs.memory ) + && ( handleType == rhs.handleType ); + } + + bool operator!=( MemoryGetWin32HandleInfoKHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eMemoryGetWin32HandleInfoKHR; + + public: + const void* pNext = nullptr; + DeviceMemory memory; + ExternalMemoryHandleTypeFlagBits handleType; + }; + static_assert( sizeof( MemoryGetWin32HandleInfoKHR ) == sizeof( VkMemoryGetWin32HandleInfoKHR ), "struct and wrapper have different size!" 
); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + struct ImportMemoryFdInfoKHR + { + ImportMemoryFdInfoKHR( ExternalMemoryHandleTypeFlagBits handleType_ = ExternalMemoryHandleTypeFlagBits::eOpaqueFd, + int fd_ = 0 ) + : handleType( handleType_ ) + , fd( fd_ ) + { + } + + ImportMemoryFdInfoKHR( VkImportMemoryFdInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( ImportMemoryFdInfoKHR ) ); + } + + ImportMemoryFdInfoKHR& operator=( VkImportMemoryFdInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( ImportMemoryFdInfoKHR ) ); + return *this; + } + ImportMemoryFdInfoKHR& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + ImportMemoryFdInfoKHR& setHandleType( ExternalMemoryHandleTypeFlagBits handleType_ ) + { + handleType = handleType_; + return *this; + } + + ImportMemoryFdInfoKHR& setFd( int fd_ ) + { + fd = fd_; + return *this; + } + + operator VkImportMemoryFdInfoKHR const&() const + { + return *reinterpret_cast(this); + } + + operator VkImportMemoryFdInfoKHR &() + { + return *reinterpret_cast(this); + } + + bool operator==( ImportMemoryFdInfoKHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( handleType == rhs.handleType ) + && ( fd == rhs.fd ); + } + + bool operator!=( ImportMemoryFdInfoKHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eImportMemoryFdInfoKHR; + + public: + const void* pNext = nullptr; + ExternalMemoryHandleTypeFlagBits handleType; + int fd; + }; + static_assert( sizeof( ImportMemoryFdInfoKHR ) == sizeof( VkImportMemoryFdInfoKHR ), "struct and wrapper have different size!" ); + + struct MemoryGetFdInfoKHR + { + MemoryGetFdInfoKHR( DeviceMemory memory_ = DeviceMemory(), + ExternalMemoryHandleTypeFlagBits handleType_ = ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) + : memory( memory_ ) + , handleType( handleType_ ) + { + } + + MemoryGetFdInfoKHR( VkMemoryGetFdInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( MemoryGetFdInfoKHR ) ); + } + + MemoryGetFdInfoKHR& operator=( VkMemoryGetFdInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( MemoryGetFdInfoKHR ) ); + return *this; + } + MemoryGetFdInfoKHR& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + MemoryGetFdInfoKHR& setMemory( DeviceMemory memory_ ) + { + memory = memory_; + return *this; + } + + MemoryGetFdInfoKHR& setHandleType( ExternalMemoryHandleTypeFlagBits handleType_ ) + { + handleType = handleType_; + return *this; + } + + operator VkMemoryGetFdInfoKHR const&() const + { + return *reinterpret_cast(this); + } + + operator VkMemoryGetFdInfoKHR &() + { + return *reinterpret_cast(this); + } + + bool operator==( MemoryGetFdInfoKHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( memory == rhs.memory ) + && ( handleType == rhs.handleType ); + } + + bool operator!=( MemoryGetFdInfoKHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eMemoryGetFdInfoKHR; + + public: + const void* pNext = nullptr; + DeviceMemory memory; + ExternalMemoryHandleTypeFlagBits handleType; + }; + static_assert( sizeof( MemoryGetFdInfoKHR ) == sizeof( VkMemoryGetFdInfoKHR ), "struct and wrapper have different size!" 
);
+
+  struct ImportMemoryHostPointerInfoEXT
+  {
+    ImportMemoryHostPointerInfoEXT( ExternalMemoryHandleTypeFlagBits handleType_ = ExternalMemoryHandleTypeFlagBits::eOpaqueFd,
+                                    void* pHostPointer_ = nullptr )
+      : handleType( handleType_ )
+      , pHostPointer( pHostPointer_ )
+    {
+    }
+
+    ImportMemoryHostPointerInfoEXT( VkImportMemoryHostPointerInfoEXT const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ImportMemoryHostPointerInfoEXT ) );
+    }
+
+    ImportMemoryHostPointerInfoEXT& operator=( VkImportMemoryHostPointerInfoEXT const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ImportMemoryHostPointerInfoEXT ) );
+      return *this;
+    }
+    ImportMemoryHostPointerInfoEXT& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ImportMemoryHostPointerInfoEXT& setHandleType( ExternalMemoryHandleTypeFlagBits handleType_ )
+    {
+      handleType = handleType_;
+      return *this;
+    }
+
+    ImportMemoryHostPointerInfoEXT& setPHostPointer( void* pHostPointer_ )
+    {
+      pHostPointer = pHostPointer_;
+      return *this;
+    }
+
+    operator VkImportMemoryHostPointerInfoEXT const&() const
+    {
+      return *reinterpret_cast<const VkImportMemoryHostPointerInfoEXT*>(this);
+    }
+
+    operator VkImportMemoryHostPointerInfoEXT &()
+    {
+      return *reinterpret_cast<VkImportMemoryHostPointerInfoEXT*>(this);
+    }
+
+    bool operator==( ImportMemoryHostPointerInfoEXT const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( handleType == rhs.handleType )
+          && ( pHostPointer == rhs.pHostPointer );
+    }
+
+    bool operator!=( ImportMemoryHostPointerInfoEXT const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType = StructureType::eImportMemoryHostPointerInfoEXT;
+
+  public:
+    const void* pNext = nullptr;
+    ExternalMemoryHandleTypeFlagBits handleType;
+    void* pHostPointer;
+  };
+  static_assert( sizeof( ImportMemoryHostPointerInfoEXT ) == sizeof( VkImportMemoryHostPointerInfoEXT ), "struct and wrapper have different size!"
); + + enum class ExternalMemoryFeatureFlagBits + { + eDedicatedOnly = VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT, + eDedicatedOnlyKHR = VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT, + eExportable = VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT, + eExportableKHR = VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT, + eImportable = VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT, + eImportableKHR = VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT + }; + + using ExternalMemoryFeatureFlags = Flags; + + VULKAN_HPP_INLINE ExternalMemoryFeatureFlags operator|( ExternalMemoryFeatureFlagBits bit0, ExternalMemoryFeatureFlagBits bit1 ) + { + return ExternalMemoryFeatureFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE ExternalMemoryFeatureFlags operator~( ExternalMemoryFeatureFlagBits bits ) + { + return ~( ExternalMemoryFeatureFlags( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(ExternalMemoryFeatureFlagBits::eDedicatedOnly) | VkFlags(ExternalMemoryFeatureFlagBits::eExportable) | VkFlags(ExternalMemoryFeatureFlagBits::eImportable) + }; + }; + + using ExternalMemoryFeatureFlagsKHR = ExternalMemoryFeatureFlags; + + struct ExternalMemoryProperties + { + operator VkExternalMemoryProperties const&() const + { + return *reinterpret_cast(this); + } + + operator VkExternalMemoryProperties &() + { + return *reinterpret_cast(this); + } + + bool operator==( ExternalMemoryProperties const& rhs ) const + { + return ( externalMemoryFeatures == rhs.externalMemoryFeatures ) + && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes ) + && ( compatibleHandleTypes == rhs.compatibleHandleTypes ); + } + + bool operator!=( ExternalMemoryProperties const& rhs ) const + { + return !operator==( rhs ); + } + + ExternalMemoryFeatureFlags externalMemoryFeatures; + ExternalMemoryHandleTypeFlags exportFromImportedHandleTypes; + ExternalMemoryHandleTypeFlags compatibleHandleTypes; + }; + static_assert( sizeof( ExternalMemoryProperties ) == sizeof( VkExternalMemoryProperties ), "struct and wrapper have different size!" ); + + using ExternalMemoryPropertiesKHR = ExternalMemoryProperties; + + struct ExternalImageFormatProperties + { + operator VkExternalImageFormatProperties const&() const + { + return *reinterpret_cast(this); + } + + operator VkExternalImageFormatProperties &() + { + return *reinterpret_cast(this); + } + + bool operator==( ExternalImageFormatProperties const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( externalMemoryProperties == rhs.externalMemoryProperties ); + } + + bool operator!=( ExternalImageFormatProperties const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eExternalImageFormatProperties; + + public: + void* pNext = nullptr; + ExternalMemoryProperties externalMemoryProperties; + }; + static_assert( sizeof( ExternalImageFormatProperties ) == sizeof( VkExternalImageFormatProperties ), "struct and wrapper have different size!" 
); + + using ExternalImageFormatPropertiesKHR = ExternalImageFormatProperties; + + struct ExternalBufferProperties + { + operator VkExternalBufferProperties const&() const + { + return *reinterpret_cast(this); + } + + operator VkExternalBufferProperties &() + { + return *reinterpret_cast(this); + } + + bool operator==( ExternalBufferProperties const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( externalMemoryProperties == rhs.externalMemoryProperties ); + } + + bool operator!=( ExternalBufferProperties const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eExternalBufferProperties; + + public: + void* pNext = nullptr; + ExternalMemoryProperties externalMemoryProperties; + }; + static_assert( sizeof( ExternalBufferProperties ) == sizeof( VkExternalBufferProperties ), "struct and wrapper have different size!" ); + + using ExternalBufferPropertiesKHR = ExternalBufferProperties; + + enum class ExternalSemaphoreHandleTypeFlagBits + { + eOpaqueFd = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT, + eOpaqueFdKHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT, + eOpaqueWin32 = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT, + eOpaqueWin32KHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT, + eOpaqueWin32Kmt = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT, + eOpaqueWin32KmtKHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT, + eD3D12Fence = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT, + eD3D12FenceKHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT, + eSyncFd = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT, + eSyncFdKHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT + }; + + using ExternalSemaphoreHandleTypeFlags = Flags; + + VULKAN_HPP_INLINE ExternalSemaphoreHandleTypeFlags operator|( ExternalSemaphoreHandleTypeFlagBits bit0, ExternalSemaphoreHandleTypeFlagBits bit1 ) + { + return ExternalSemaphoreHandleTypeFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE ExternalSemaphoreHandleTypeFlags operator~( ExternalSemaphoreHandleTypeFlagBits bits ) + { + return ~( ExternalSemaphoreHandleTypeFlags( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd) | VkFlags(ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32) | VkFlags(ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32Kmt) | VkFlags(ExternalSemaphoreHandleTypeFlagBits::eD3D12Fence) | VkFlags(ExternalSemaphoreHandleTypeFlagBits::eSyncFd) + }; + }; + + using ExternalSemaphoreHandleTypeFlagsKHR = ExternalSemaphoreHandleTypeFlags; + + struct PhysicalDeviceExternalSemaphoreInfo + { + PhysicalDeviceExternalSemaphoreInfo( ExternalSemaphoreHandleTypeFlagBits handleType_ = ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd ) + : handleType( handleType_ ) + { + } + + PhysicalDeviceExternalSemaphoreInfo( VkPhysicalDeviceExternalSemaphoreInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceExternalSemaphoreInfo ) ); + } + + PhysicalDeviceExternalSemaphoreInfo& operator=( VkPhysicalDeviceExternalSemaphoreInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceExternalSemaphoreInfo ) ); + return *this; + } + PhysicalDeviceExternalSemaphoreInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceExternalSemaphoreInfo& setHandleType( ExternalSemaphoreHandleTypeFlagBits handleType_ ) + { + handleType = handleType_; + return *this; + } + + operator VkPhysicalDeviceExternalSemaphoreInfo const&() const + { + 
return *reinterpret_cast(this); + } + + operator VkPhysicalDeviceExternalSemaphoreInfo &() + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDeviceExternalSemaphoreInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( handleType == rhs.handleType ); + } + + bool operator!=( PhysicalDeviceExternalSemaphoreInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePhysicalDeviceExternalSemaphoreInfo; + + public: + const void* pNext = nullptr; + ExternalSemaphoreHandleTypeFlagBits handleType; + }; + static_assert( sizeof( PhysicalDeviceExternalSemaphoreInfo ) == sizeof( VkPhysicalDeviceExternalSemaphoreInfo ), "struct and wrapper have different size!" ); + + using PhysicalDeviceExternalSemaphoreInfoKHR = PhysicalDeviceExternalSemaphoreInfo; + + struct ExportSemaphoreCreateInfo + { + ExportSemaphoreCreateInfo( ExternalSemaphoreHandleTypeFlags handleTypes_ = ExternalSemaphoreHandleTypeFlags() ) + : handleTypes( handleTypes_ ) + { + } + + ExportSemaphoreCreateInfo( VkExportSemaphoreCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( ExportSemaphoreCreateInfo ) ); + } + + ExportSemaphoreCreateInfo& operator=( VkExportSemaphoreCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( ExportSemaphoreCreateInfo ) ); + return *this; + } + ExportSemaphoreCreateInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + ExportSemaphoreCreateInfo& setHandleTypes( ExternalSemaphoreHandleTypeFlags handleTypes_ ) + { + handleTypes = handleTypes_; + return *this; + } + + operator VkExportSemaphoreCreateInfo const&() const + { + return *reinterpret_cast(this); + } + + operator VkExportSemaphoreCreateInfo &() + { + return *reinterpret_cast(this); + } + + bool operator==( ExportSemaphoreCreateInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( handleTypes == rhs.handleTypes ); + } + + bool operator!=( ExportSemaphoreCreateInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eExportSemaphoreCreateInfo; + + public: + const void* pNext = nullptr; + ExternalSemaphoreHandleTypeFlags handleTypes; + }; + static_assert( sizeof( ExportSemaphoreCreateInfo ) == sizeof( VkExportSemaphoreCreateInfo ), "struct and wrapper have different size!" 
); + + using ExportSemaphoreCreateInfoKHR = ExportSemaphoreCreateInfo; + +#ifdef VK_USE_PLATFORM_WIN32_KHR + struct SemaphoreGetWin32HandleInfoKHR + { + SemaphoreGetWin32HandleInfoKHR( Semaphore semaphore_ = Semaphore(), + ExternalSemaphoreHandleTypeFlagBits handleType_ = ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd ) + : semaphore( semaphore_ ) + , handleType( handleType_ ) + { + } + + SemaphoreGetWin32HandleInfoKHR( VkSemaphoreGetWin32HandleInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( SemaphoreGetWin32HandleInfoKHR ) ); + } + + SemaphoreGetWin32HandleInfoKHR& operator=( VkSemaphoreGetWin32HandleInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( SemaphoreGetWin32HandleInfoKHR ) ); + return *this; + } + SemaphoreGetWin32HandleInfoKHR& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + SemaphoreGetWin32HandleInfoKHR& setSemaphore( Semaphore semaphore_ ) + { + semaphore = semaphore_; + return *this; + } + + SemaphoreGetWin32HandleInfoKHR& setHandleType( ExternalSemaphoreHandleTypeFlagBits handleType_ ) + { + handleType = handleType_; + return *this; + } + + operator VkSemaphoreGetWin32HandleInfoKHR const&() const + { + return *reinterpret_cast(this); + } + + operator VkSemaphoreGetWin32HandleInfoKHR &() + { + return *reinterpret_cast(this); + } + + bool operator==( SemaphoreGetWin32HandleInfoKHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( semaphore == rhs.semaphore ) + && ( handleType == rhs.handleType ); + } + + bool operator!=( SemaphoreGetWin32HandleInfoKHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eSemaphoreGetWin32HandleInfoKHR; + + public: + const void* pNext = nullptr; + Semaphore semaphore; + ExternalSemaphoreHandleTypeFlagBits handleType; + }; + static_assert( sizeof( SemaphoreGetWin32HandleInfoKHR ) == sizeof( VkSemaphoreGetWin32HandleInfoKHR ), "struct and wrapper have different size!" 
); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + struct SemaphoreGetFdInfoKHR + { + SemaphoreGetFdInfoKHR( Semaphore semaphore_ = Semaphore(), + ExternalSemaphoreHandleTypeFlagBits handleType_ = ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd ) + : semaphore( semaphore_ ) + , handleType( handleType_ ) + { + } + + SemaphoreGetFdInfoKHR( VkSemaphoreGetFdInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( SemaphoreGetFdInfoKHR ) ); + } + + SemaphoreGetFdInfoKHR& operator=( VkSemaphoreGetFdInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( SemaphoreGetFdInfoKHR ) ); + return *this; + } + SemaphoreGetFdInfoKHR& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + SemaphoreGetFdInfoKHR& setSemaphore( Semaphore semaphore_ ) + { + semaphore = semaphore_; + return *this; + } + + SemaphoreGetFdInfoKHR& setHandleType( ExternalSemaphoreHandleTypeFlagBits handleType_ ) + { + handleType = handleType_; + return *this; + } + + operator VkSemaphoreGetFdInfoKHR const&() const + { + return *reinterpret_cast(this); + } + + operator VkSemaphoreGetFdInfoKHR &() + { + return *reinterpret_cast(this); + } + + bool operator==( SemaphoreGetFdInfoKHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( semaphore == rhs.semaphore ) + && ( handleType == rhs.handleType ); + } + + bool operator!=( SemaphoreGetFdInfoKHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eSemaphoreGetFdInfoKHR; + + public: + const void* pNext = nullptr; + Semaphore semaphore; + ExternalSemaphoreHandleTypeFlagBits handleType; + }; + static_assert( sizeof( SemaphoreGetFdInfoKHR ) == sizeof( VkSemaphoreGetFdInfoKHR ), "struct and wrapper have different size!" ); + + enum class ExternalSemaphoreFeatureFlagBits + { + eExportable = VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT, + eExportableKHR = VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT, + eImportable = VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT, + eImportableKHR = VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT + }; + + using ExternalSemaphoreFeatureFlags = Flags; + + VULKAN_HPP_INLINE ExternalSemaphoreFeatureFlags operator|( ExternalSemaphoreFeatureFlagBits bit0, ExternalSemaphoreFeatureFlagBits bit1 ) + { + return ExternalSemaphoreFeatureFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE ExternalSemaphoreFeatureFlags operator~( ExternalSemaphoreFeatureFlagBits bits ) + { + return ~( ExternalSemaphoreFeatureFlags( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(ExternalSemaphoreFeatureFlagBits::eExportable) | VkFlags(ExternalSemaphoreFeatureFlagBits::eImportable) + }; + }; + + using ExternalSemaphoreFeatureFlagsKHR = ExternalSemaphoreFeatureFlags; + + struct ExternalSemaphoreProperties + { + operator VkExternalSemaphoreProperties const&() const + { + return *reinterpret_cast(this); + } + + operator VkExternalSemaphoreProperties &() + { + return *reinterpret_cast(this); + } + + bool operator==( ExternalSemaphoreProperties const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes ) + && ( compatibleHandleTypes == rhs.compatibleHandleTypes ) + && ( externalSemaphoreFeatures == rhs.externalSemaphoreFeatures ); + } + + bool operator!=( ExternalSemaphoreProperties const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eExternalSemaphoreProperties; + + public: + void* pNext = nullptr; + 
ExternalSemaphoreHandleTypeFlags exportFromImportedHandleTypes; + ExternalSemaphoreHandleTypeFlags compatibleHandleTypes; + ExternalSemaphoreFeatureFlags externalSemaphoreFeatures; + }; + static_assert( sizeof( ExternalSemaphoreProperties ) == sizeof( VkExternalSemaphoreProperties ), "struct and wrapper have different size!" ); + + using ExternalSemaphorePropertiesKHR = ExternalSemaphoreProperties; + + enum class SemaphoreImportFlagBits + { + eTemporary = VK_SEMAPHORE_IMPORT_TEMPORARY_BIT, + eTemporaryKHR = VK_SEMAPHORE_IMPORT_TEMPORARY_BIT + }; + + using SemaphoreImportFlags = Flags; + + VULKAN_HPP_INLINE SemaphoreImportFlags operator|( SemaphoreImportFlagBits bit0, SemaphoreImportFlagBits bit1 ) + { + return SemaphoreImportFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE SemaphoreImportFlags operator~( SemaphoreImportFlagBits bits ) + { + return ~( SemaphoreImportFlags( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(SemaphoreImportFlagBits::eTemporary) + }; + }; + + using SemaphoreImportFlagsKHR = SemaphoreImportFlags; + +#ifdef VK_USE_PLATFORM_WIN32_KHR + struct ImportSemaphoreWin32HandleInfoKHR + { + ImportSemaphoreWin32HandleInfoKHR( Semaphore semaphore_ = Semaphore(), + SemaphoreImportFlags flags_ = SemaphoreImportFlags(), + ExternalSemaphoreHandleTypeFlagBits handleType_ = ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd, + HANDLE handle_ = 0, + LPCWSTR name_ = 0 ) + : semaphore( semaphore_ ) + , flags( flags_ ) + , handleType( handleType_ ) + , handle( handle_ ) + , name( name_ ) + { + } + + ImportSemaphoreWin32HandleInfoKHR( VkImportSemaphoreWin32HandleInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( ImportSemaphoreWin32HandleInfoKHR ) ); + } + + ImportSemaphoreWin32HandleInfoKHR& operator=( VkImportSemaphoreWin32HandleInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( ImportSemaphoreWin32HandleInfoKHR ) ); + return *this; + } + ImportSemaphoreWin32HandleInfoKHR& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + ImportSemaphoreWin32HandleInfoKHR& setSemaphore( Semaphore semaphore_ ) + { + semaphore = semaphore_; + return *this; + } + + ImportSemaphoreWin32HandleInfoKHR& setFlags( SemaphoreImportFlags flags_ ) + { + flags = flags_; + return *this; + } + + ImportSemaphoreWin32HandleInfoKHR& setHandleType( ExternalSemaphoreHandleTypeFlagBits handleType_ ) + { + handleType = handleType_; + return *this; + } + + ImportSemaphoreWin32HandleInfoKHR& setHandle( HANDLE handle_ ) + { + handle = handle_; + return *this; + } + + ImportSemaphoreWin32HandleInfoKHR& setName( LPCWSTR name_ ) + { + name = name_; + return *this; + } + + operator VkImportSemaphoreWin32HandleInfoKHR const&() const + { + return *reinterpret_cast(this); + } + + operator VkImportSemaphoreWin32HandleInfoKHR &() + { + return *reinterpret_cast(this); + } + + bool operator==( ImportSemaphoreWin32HandleInfoKHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( semaphore == rhs.semaphore ) + && ( flags == rhs.flags ) + && ( handleType == rhs.handleType ) + && ( handle == rhs.handle ) + && ( name == rhs.name ); + } + + bool operator!=( ImportSemaphoreWin32HandleInfoKHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eImportSemaphoreWin32HandleInfoKHR; + + public: + const void* pNext = nullptr; + Semaphore semaphore; + SemaphoreImportFlags flags; + ExternalSemaphoreHandleTypeFlagBits handleType; + HANDLE handle; + LPCWSTR name; + }; + static_assert( sizeof( 
ImportSemaphoreWin32HandleInfoKHR ) == sizeof( VkImportSemaphoreWin32HandleInfoKHR ), "struct and wrapper have different size!" ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + struct ImportSemaphoreFdInfoKHR + { + ImportSemaphoreFdInfoKHR( Semaphore semaphore_ = Semaphore(), + SemaphoreImportFlags flags_ = SemaphoreImportFlags(), + ExternalSemaphoreHandleTypeFlagBits handleType_ = ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd, + int fd_ = 0 ) + : semaphore( semaphore_ ) + , flags( flags_ ) + , handleType( handleType_ ) + , fd( fd_ ) + { + } + + ImportSemaphoreFdInfoKHR( VkImportSemaphoreFdInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( ImportSemaphoreFdInfoKHR ) ); + } + + ImportSemaphoreFdInfoKHR& operator=( VkImportSemaphoreFdInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( ImportSemaphoreFdInfoKHR ) ); + return *this; + } + ImportSemaphoreFdInfoKHR& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + ImportSemaphoreFdInfoKHR& setSemaphore( Semaphore semaphore_ ) + { + semaphore = semaphore_; + return *this; + } + + ImportSemaphoreFdInfoKHR& setFlags( SemaphoreImportFlags flags_ ) + { + flags = flags_; + return *this; + } + + ImportSemaphoreFdInfoKHR& setHandleType( ExternalSemaphoreHandleTypeFlagBits handleType_ ) + { + handleType = handleType_; + return *this; + } + + ImportSemaphoreFdInfoKHR& setFd( int fd_ ) + { + fd = fd_; + return *this; + } + + operator VkImportSemaphoreFdInfoKHR const&() const + { + return *reinterpret_cast(this); + } + + operator VkImportSemaphoreFdInfoKHR &() + { + return *reinterpret_cast(this); + } + + bool operator==( ImportSemaphoreFdInfoKHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( semaphore == rhs.semaphore ) + && ( flags == rhs.flags ) + && ( handleType == rhs.handleType ) + && ( fd == rhs.fd ); + } + + bool operator!=( ImportSemaphoreFdInfoKHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eImportSemaphoreFdInfoKHR; + + public: + const void* pNext = nullptr; + Semaphore semaphore; + SemaphoreImportFlags flags; + ExternalSemaphoreHandleTypeFlagBits handleType; + int fd; + }; + static_assert( sizeof( ImportSemaphoreFdInfoKHR ) == sizeof( VkImportSemaphoreFdInfoKHR ), "struct and wrapper have different size!" 
); + + enum class ExternalFenceHandleTypeFlagBits + { + eOpaqueFd = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT, + eOpaqueFdKHR = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT, + eOpaqueWin32 = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT, + eOpaqueWin32KHR = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT, + eOpaqueWin32Kmt = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT, + eOpaqueWin32KmtKHR = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT, + eSyncFd = VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT, + eSyncFdKHR = VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT + }; + + using ExternalFenceHandleTypeFlags = Flags; + + VULKAN_HPP_INLINE ExternalFenceHandleTypeFlags operator|( ExternalFenceHandleTypeFlagBits bit0, ExternalFenceHandleTypeFlagBits bit1 ) + { + return ExternalFenceHandleTypeFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE ExternalFenceHandleTypeFlags operator~( ExternalFenceHandleTypeFlagBits bits ) + { + return ~( ExternalFenceHandleTypeFlags( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(ExternalFenceHandleTypeFlagBits::eOpaqueFd) | VkFlags(ExternalFenceHandleTypeFlagBits::eOpaqueWin32) | VkFlags(ExternalFenceHandleTypeFlagBits::eOpaqueWin32Kmt) | VkFlags(ExternalFenceHandleTypeFlagBits::eSyncFd) + }; + }; + + using ExternalFenceHandleTypeFlagsKHR = ExternalFenceHandleTypeFlags; + + struct PhysicalDeviceExternalFenceInfo + { + PhysicalDeviceExternalFenceInfo( ExternalFenceHandleTypeFlagBits handleType_ = ExternalFenceHandleTypeFlagBits::eOpaqueFd ) + : handleType( handleType_ ) + { + } + + PhysicalDeviceExternalFenceInfo( VkPhysicalDeviceExternalFenceInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceExternalFenceInfo ) ); + } + + PhysicalDeviceExternalFenceInfo& operator=( VkPhysicalDeviceExternalFenceInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceExternalFenceInfo ) ); + return *this; + } + PhysicalDeviceExternalFenceInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceExternalFenceInfo& setHandleType( ExternalFenceHandleTypeFlagBits handleType_ ) + { + handleType = handleType_; + return *this; + } + + operator VkPhysicalDeviceExternalFenceInfo const&() const + { + return *reinterpret_cast(this); + } + + operator VkPhysicalDeviceExternalFenceInfo &() + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDeviceExternalFenceInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( handleType == rhs.handleType ); + } + + bool operator!=( PhysicalDeviceExternalFenceInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePhysicalDeviceExternalFenceInfo; + + public: + const void* pNext = nullptr; + ExternalFenceHandleTypeFlagBits handleType; + }; + static_assert( sizeof( PhysicalDeviceExternalFenceInfo ) == sizeof( VkPhysicalDeviceExternalFenceInfo ), "struct and wrapper have different size!" 
); + + using PhysicalDeviceExternalFenceInfoKHR = PhysicalDeviceExternalFenceInfo; + + struct ExportFenceCreateInfo + { + ExportFenceCreateInfo( ExternalFenceHandleTypeFlags handleTypes_ = ExternalFenceHandleTypeFlags() ) + : handleTypes( handleTypes_ ) + { + } + + ExportFenceCreateInfo( VkExportFenceCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( ExportFenceCreateInfo ) ); + } + + ExportFenceCreateInfo& operator=( VkExportFenceCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( ExportFenceCreateInfo ) ); + return *this; + } + ExportFenceCreateInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + ExportFenceCreateInfo& setHandleTypes( ExternalFenceHandleTypeFlags handleTypes_ ) + { + handleTypes = handleTypes_; + return *this; + } + + operator VkExportFenceCreateInfo const&() const + { + return *reinterpret_cast(this); + } + + operator VkExportFenceCreateInfo &() + { + return *reinterpret_cast(this); + } + + bool operator==( ExportFenceCreateInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( handleTypes == rhs.handleTypes ); + } + + bool operator!=( ExportFenceCreateInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eExportFenceCreateInfo; + + public: + const void* pNext = nullptr; + ExternalFenceHandleTypeFlags handleTypes; + }; + static_assert( sizeof( ExportFenceCreateInfo ) == sizeof( VkExportFenceCreateInfo ), "struct and wrapper have different size!" ); + + using ExportFenceCreateInfoKHR = ExportFenceCreateInfo; + +#ifdef VK_USE_PLATFORM_WIN32_KHR + struct FenceGetWin32HandleInfoKHR + { + FenceGetWin32HandleInfoKHR( Fence fence_ = Fence(), + ExternalFenceHandleTypeFlagBits handleType_ = ExternalFenceHandleTypeFlagBits::eOpaqueFd ) + : fence( fence_ ) + , handleType( handleType_ ) + { + } + + FenceGetWin32HandleInfoKHR( VkFenceGetWin32HandleInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( FenceGetWin32HandleInfoKHR ) ); + } + + FenceGetWin32HandleInfoKHR& operator=( VkFenceGetWin32HandleInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( FenceGetWin32HandleInfoKHR ) ); + return *this; + } + FenceGetWin32HandleInfoKHR& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + FenceGetWin32HandleInfoKHR& setFence( Fence fence_ ) + { + fence = fence_; + return *this; + } + + FenceGetWin32HandleInfoKHR& setHandleType( ExternalFenceHandleTypeFlagBits handleType_ ) + { + handleType = handleType_; + return *this; + } + + operator VkFenceGetWin32HandleInfoKHR const&() const + { + return *reinterpret_cast(this); + } + + operator VkFenceGetWin32HandleInfoKHR &() + { + return *reinterpret_cast(this); + } + + bool operator==( FenceGetWin32HandleInfoKHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( fence == rhs.fence ) + && ( handleType == rhs.handleType ); + } + + bool operator!=( FenceGetWin32HandleInfoKHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eFenceGetWin32HandleInfoKHR; + + public: + const void* pNext = nullptr; + Fence fence; + ExternalFenceHandleTypeFlagBits handleType; + }; + static_assert( sizeof( FenceGetWin32HandleInfoKHR ) == sizeof( VkFenceGetWin32HandleInfoKHR ), "struct and wrapper have different size!" 
); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + struct FenceGetFdInfoKHR + { + FenceGetFdInfoKHR( Fence fence_ = Fence(), + ExternalFenceHandleTypeFlagBits handleType_ = ExternalFenceHandleTypeFlagBits::eOpaqueFd ) + : fence( fence_ ) + , handleType( handleType_ ) + { + } + + FenceGetFdInfoKHR( VkFenceGetFdInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( FenceGetFdInfoKHR ) ); + } + + FenceGetFdInfoKHR& operator=( VkFenceGetFdInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( FenceGetFdInfoKHR ) ); + return *this; + } + FenceGetFdInfoKHR& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + FenceGetFdInfoKHR& setFence( Fence fence_ ) + { + fence = fence_; + return *this; + } + + FenceGetFdInfoKHR& setHandleType( ExternalFenceHandleTypeFlagBits handleType_ ) + { + handleType = handleType_; + return *this; + } + + operator VkFenceGetFdInfoKHR const&() const + { + return *reinterpret_cast(this); + } + + operator VkFenceGetFdInfoKHR &() + { + return *reinterpret_cast(this); + } + + bool operator==( FenceGetFdInfoKHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( fence == rhs.fence ) + && ( handleType == rhs.handleType ); + } + + bool operator!=( FenceGetFdInfoKHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eFenceGetFdInfoKHR; + + public: + const void* pNext = nullptr; + Fence fence; + ExternalFenceHandleTypeFlagBits handleType; + }; + static_assert( sizeof( FenceGetFdInfoKHR ) == sizeof( VkFenceGetFdInfoKHR ), "struct and wrapper have different size!" ); + + enum class ExternalFenceFeatureFlagBits + { + eExportable = VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT, + eExportableKHR = VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT, + eImportable = VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT, + eImportableKHR = VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT + }; + + using ExternalFenceFeatureFlags = Flags; + + VULKAN_HPP_INLINE ExternalFenceFeatureFlags operator|( ExternalFenceFeatureFlagBits bit0, ExternalFenceFeatureFlagBits bit1 ) + { + return ExternalFenceFeatureFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE ExternalFenceFeatureFlags operator~( ExternalFenceFeatureFlagBits bits ) + { + return ~( ExternalFenceFeatureFlags( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(ExternalFenceFeatureFlagBits::eExportable) | VkFlags(ExternalFenceFeatureFlagBits::eImportable) + }; + }; + + using ExternalFenceFeatureFlagsKHR = ExternalFenceFeatureFlags; + + struct ExternalFenceProperties + { + operator VkExternalFenceProperties const&() const + { + return *reinterpret_cast(this); + } + + operator VkExternalFenceProperties &() + { + return *reinterpret_cast(this); + } + + bool operator==( ExternalFenceProperties const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes ) + && ( compatibleHandleTypes == rhs.compatibleHandleTypes ) + && ( externalFenceFeatures == rhs.externalFenceFeatures ); + } + + bool operator!=( ExternalFenceProperties const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eExternalFenceProperties; + + public: + void* pNext = nullptr; + ExternalFenceHandleTypeFlags exportFromImportedHandleTypes; + ExternalFenceHandleTypeFlags compatibleHandleTypes; + ExternalFenceFeatureFlags externalFenceFeatures; + }; + static_assert( sizeof( ExternalFenceProperties ) == sizeof( 
VkExternalFenceProperties ), "struct and wrapper have different size!" ); + + using ExternalFencePropertiesKHR = ExternalFenceProperties; + + enum class FenceImportFlagBits + { + eTemporary = VK_FENCE_IMPORT_TEMPORARY_BIT, + eTemporaryKHR = VK_FENCE_IMPORT_TEMPORARY_BIT + }; + + using FenceImportFlags = Flags; + + VULKAN_HPP_INLINE FenceImportFlags operator|( FenceImportFlagBits bit0, FenceImportFlagBits bit1 ) + { + return FenceImportFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE FenceImportFlags operator~( FenceImportFlagBits bits ) + { + return ~( FenceImportFlags( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(FenceImportFlagBits::eTemporary) + }; + }; + + using FenceImportFlagsKHR = FenceImportFlags; + +#ifdef VK_USE_PLATFORM_WIN32_KHR + struct ImportFenceWin32HandleInfoKHR + { + ImportFenceWin32HandleInfoKHR( Fence fence_ = Fence(), + FenceImportFlags flags_ = FenceImportFlags(), + ExternalFenceHandleTypeFlagBits handleType_ = ExternalFenceHandleTypeFlagBits::eOpaqueFd, + HANDLE handle_ = 0, + LPCWSTR name_ = 0 ) + : fence( fence_ ) + , flags( flags_ ) + , handleType( handleType_ ) + , handle( handle_ ) + , name( name_ ) + { + } + + ImportFenceWin32HandleInfoKHR( VkImportFenceWin32HandleInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( ImportFenceWin32HandleInfoKHR ) ); + } + + ImportFenceWin32HandleInfoKHR& operator=( VkImportFenceWin32HandleInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( ImportFenceWin32HandleInfoKHR ) ); + return *this; + } + ImportFenceWin32HandleInfoKHR& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + ImportFenceWin32HandleInfoKHR& setFence( Fence fence_ ) + { + fence = fence_; + return *this; + } + + ImportFenceWin32HandleInfoKHR& setFlags( FenceImportFlags flags_ ) + { + flags = flags_; + return *this; + } + + ImportFenceWin32HandleInfoKHR& setHandleType( ExternalFenceHandleTypeFlagBits handleType_ ) + { + handleType = handleType_; + return *this; + } + + ImportFenceWin32HandleInfoKHR& setHandle( HANDLE handle_ ) + { + handle = handle_; + return *this; + } + + ImportFenceWin32HandleInfoKHR& setName( LPCWSTR name_ ) + { + name = name_; + return *this; + } + + operator VkImportFenceWin32HandleInfoKHR const&() const + { + return *reinterpret_cast(this); + } + + operator VkImportFenceWin32HandleInfoKHR &() + { + return *reinterpret_cast(this); + } + + bool operator==( ImportFenceWin32HandleInfoKHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( fence == rhs.fence ) + && ( flags == rhs.flags ) + && ( handleType == rhs.handleType ) + && ( handle == rhs.handle ) + && ( name == rhs.name ); + } + + bool operator!=( ImportFenceWin32HandleInfoKHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eImportFenceWin32HandleInfoKHR; + + public: + const void* pNext = nullptr; + Fence fence; + FenceImportFlags flags; + ExternalFenceHandleTypeFlagBits handleType; + HANDLE handle; + LPCWSTR name; + }; + static_assert( sizeof( ImportFenceWin32HandleInfoKHR ) == sizeof( VkImportFenceWin32HandleInfoKHR ), "struct and wrapper have different size!" 
); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + struct ImportFenceFdInfoKHR + { + ImportFenceFdInfoKHR( Fence fence_ = Fence(), + FenceImportFlags flags_ = FenceImportFlags(), + ExternalFenceHandleTypeFlagBits handleType_ = ExternalFenceHandleTypeFlagBits::eOpaqueFd, + int fd_ = 0 ) + : fence( fence_ ) + , flags( flags_ ) + , handleType( handleType_ ) + , fd( fd_ ) + { + } + + ImportFenceFdInfoKHR( VkImportFenceFdInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( ImportFenceFdInfoKHR ) ); + } + + ImportFenceFdInfoKHR& operator=( VkImportFenceFdInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( ImportFenceFdInfoKHR ) ); + return *this; + } + ImportFenceFdInfoKHR& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + ImportFenceFdInfoKHR& setFence( Fence fence_ ) + { + fence = fence_; + return *this; + } + + ImportFenceFdInfoKHR& setFlags( FenceImportFlags flags_ ) + { + flags = flags_; + return *this; + } + + ImportFenceFdInfoKHR& setHandleType( ExternalFenceHandleTypeFlagBits handleType_ ) + { + handleType = handleType_; + return *this; + } + + ImportFenceFdInfoKHR& setFd( int fd_ ) + { + fd = fd_; + return *this; + } + + operator VkImportFenceFdInfoKHR const&() const + { + return *reinterpret_cast(this); + } + + operator VkImportFenceFdInfoKHR &() + { + return *reinterpret_cast(this); + } + + bool operator==( ImportFenceFdInfoKHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( fence == rhs.fence ) + && ( flags == rhs.flags ) + && ( handleType == rhs.handleType ) + && ( fd == rhs.fd ); + } + + bool operator!=( ImportFenceFdInfoKHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eImportFenceFdInfoKHR; + + public: + const void* pNext = nullptr; + Fence fence; + FenceImportFlags flags; + ExternalFenceHandleTypeFlagBits handleType; + int fd; + }; + static_assert( sizeof( ImportFenceFdInfoKHR ) == sizeof( VkImportFenceFdInfoKHR ), "struct and wrapper have different size!" 
);
+
+  enum class SurfaceCounterFlagBitsEXT
+  {
+    eVblank = VK_SURFACE_COUNTER_VBLANK_EXT
+  };
+
+  using SurfaceCounterFlagsEXT = Flags<SurfaceCounterFlagBitsEXT, VkSurfaceCounterFlagsEXT>;
+
+  VULKAN_HPP_INLINE SurfaceCounterFlagsEXT operator|( SurfaceCounterFlagBitsEXT bit0, SurfaceCounterFlagBitsEXT bit1 )
+  {
+    return SurfaceCounterFlagsEXT( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE SurfaceCounterFlagsEXT operator~( SurfaceCounterFlagBitsEXT bits )
+  {
+    return ~( SurfaceCounterFlagsEXT( bits ) );
+  }
+
+  template <> struct FlagTraits<SurfaceCounterFlagBitsEXT>
+  {
+    enum
+    {
+      allFlags = VkFlags(SurfaceCounterFlagBitsEXT::eVblank)
+    };
+  };
+
+  struct SurfaceCapabilities2EXT
+  {
+    operator VkSurfaceCapabilities2EXT const&() const
+    {
+      return *reinterpret_cast<const VkSurfaceCapabilities2EXT*>(this);
+    }
+
+    operator VkSurfaceCapabilities2EXT &()
+    {
+      return *reinterpret_cast<VkSurfaceCapabilities2EXT*>(this);
+    }
+
+    bool operator==( SurfaceCapabilities2EXT const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( minImageCount == rhs.minImageCount )
+          && ( maxImageCount == rhs.maxImageCount )
+          && ( currentExtent == rhs.currentExtent )
+          && ( minImageExtent == rhs.minImageExtent )
+          && ( maxImageExtent == rhs.maxImageExtent )
+          && ( maxImageArrayLayers == rhs.maxImageArrayLayers )
+          && ( supportedTransforms == rhs.supportedTransforms )
+          && ( currentTransform == rhs.currentTransform )
+          && ( supportedCompositeAlpha == rhs.supportedCompositeAlpha )
+          && ( supportedUsageFlags == rhs.supportedUsageFlags )
+          && ( supportedSurfaceCounters == rhs.supportedSurfaceCounters );
+    }
+
+    bool operator!=( SurfaceCapabilities2EXT const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType = StructureType::eSurfaceCapabilities2EXT;
+
+  public:
+    void* pNext = nullptr;
+    uint32_t minImageCount;
+    uint32_t maxImageCount;
+    Extent2D currentExtent;
+    Extent2D minImageExtent;
+    Extent2D maxImageExtent;
+    uint32_t maxImageArrayLayers;
+    SurfaceTransformFlagsKHR supportedTransforms;
+    SurfaceTransformFlagBitsKHR currentTransform;
+    CompositeAlphaFlagsKHR supportedCompositeAlpha;
+    ImageUsageFlags supportedUsageFlags;
+    SurfaceCounterFlagsEXT supportedSurfaceCounters;
+  };
+  static_assert( sizeof( SurfaceCapabilities2EXT ) == sizeof( VkSurfaceCapabilities2EXT ), "struct and wrapper have different size!"
); + + struct SwapchainCounterCreateInfoEXT + { + SwapchainCounterCreateInfoEXT( SurfaceCounterFlagsEXT surfaceCounters_ = SurfaceCounterFlagsEXT() ) + : surfaceCounters( surfaceCounters_ ) + { + } + + SwapchainCounterCreateInfoEXT( VkSwapchainCounterCreateInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( SwapchainCounterCreateInfoEXT ) ); + } + + SwapchainCounterCreateInfoEXT& operator=( VkSwapchainCounterCreateInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( SwapchainCounterCreateInfoEXT ) ); + return *this; + } + SwapchainCounterCreateInfoEXT& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + SwapchainCounterCreateInfoEXT& setSurfaceCounters( SurfaceCounterFlagsEXT surfaceCounters_ ) + { + surfaceCounters = surfaceCounters_; + return *this; + } + + operator VkSwapchainCounterCreateInfoEXT const&() const + { + return *reinterpret_cast(this); + } + + operator VkSwapchainCounterCreateInfoEXT &() + { + return *reinterpret_cast(this); + } + + bool operator==( SwapchainCounterCreateInfoEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( surfaceCounters == rhs.surfaceCounters ); + } + + bool operator!=( SwapchainCounterCreateInfoEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eSwapchainCounterCreateInfoEXT; + + public: + const void* pNext = nullptr; + SurfaceCounterFlagsEXT surfaceCounters; + }; + static_assert( sizeof( SwapchainCounterCreateInfoEXT ) == sizeof( VkSwapchainCounterCreateInfoEXT ), "struct and wrapper have different size!" ); + + enum class DisplayPowerStateEXT + { + eOff = VK_DISPLAY_POWER_STATE_OFF_EXT, + eSuspend = VK_DISPLAY_POWER_STATE_SUSPEND_EXT, + eOn = VK_DISPLAY_POWER_STATE_ON_EXT + }; + + struct DisplayPowerInfoEXT + { + DisplayPowerInfoEXT( DisplayPowerStateEXT powerState_ = DisplayPowerStateEXT::eOff ) + : powerState( powerState_ ) + { + } + + DisplayPowerInfoEXT( VkDisplayPowerInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( DisplayPowerInfoEXT ) ); + } + + DisplayPowerInfoEXT& operator=( VkDisplayPowerInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( DisplayPowerInfoEXT ) ); + return *this; + } + DisplayPowerInfoEXT& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + DisplayPowerInfoEXT& setPowerState( DisplayPowerStateEXT powerState_ ) + { + powerState = powerState_; + return *this; + } + + operator VkDisplayPowerInfoEXT const&() const + { + return *reinterpret_cast(this); + } + + operator VkDisplayPowerInfoEXT &() + { + return *reinterpret_cast(this); + } + + bool operator==( DisplayPowerInfoEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( powerState == rhs.powerState ); + } + + bool operator!=( DisplayPowerInfoEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eDisplayPowerInfoEXT; + + public: + const void* pNext = nullptr; + DisplayPowerStateEXT powerState; + }; + static_assert( sizeof( DisplayPowerInfoEXT ) == sizeof( VkDisplayPowerInfoEXT ), "struct and wrapper have different size!" 
); + + enum class DeviceEventTypeEXT + { + eDisplayHotplug = VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT + }; + + struct DeviceEventInfoEXT + { + DeviceEventInfoEXT( DeviceEventTypeEXT deviceEvent_ = DeviceEventTypeEXT::eDisplayHotplug ) + : deviceEvent( deviceEvent_ ) + { + } + + DeviceEventInfoEXT( VkDeviceEventInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( DeviceEventInfoEXT ) ); + } + + DeviceEventInfoEXT& operator=( VkDeviceEventInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( DeviceEventInfoEXT ) ); + return *this; + } + DeviceEventInfoEXT& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + DeviceEventInfoEXT& setDeviceEvent( DeviceEventTypeEXT deviceEvent_ ) + { + deviceEvent = deviceEvent_; + return *this; + } + + operator VkDeviceEventInfoEXT const&() const + { + return *reinterpret_cast(this); + } + + operator VkDeviceEventInfoEXT &() + { + return *reinterpret_cast(this); + } + + bool operator==( DeviceEventInfoEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( deviceEvent == rhs.deviceEvent ); + } + + bool operator!=( DeviceEventInfoEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eDeviceEventInfoEXT; + + public: + const void* pNext = nullptr; + DeviceEventTypeEXT deviceEvent; + }; + static_assert( sizeof( DeviceEventInfoEXT ) == sizeof( VkDeviceEventInfoEXT ), "struct and wrapper have different size!" ); + + enum class DisplayEventTypeEXT + { + eFirstPixelOut = VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT + }; + + struct DisplayEventInfoEXT + { + DisplayEventInfoEXT( DisplayEventTypeEXT displayEvent_ = DisplayEventTypeEXT::eFirstPixelOut ) + : displayEvent( displayEvent_ ) + { + } + + DisplayEventInfoEXT( VkDisplayEventInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( DisplayEventInfoEXT ) ); + } + + DisplayEventInfoEXT& operator=( VkDisplayEventInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( DisplayEventInfoEXT ) ); + return *this; + } + DisplayEventInfoEXT& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + DisplayEventInfoEXT& setDisplayEvent( DisplayEventTypeEXT displayEvent_ ) + { + displayEvent = displayEvent_; + return *this; + } + + operator VkDisplayEventInfoEXT const&() const + { + return *reinterpret_cast(this); + } + + operator VkDisplayEventInfoEXT &() + { + return *reinterpret_cast(this); + } + + bool operator==( DisplayEventInfoEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( displayEvent == rhs.displayEvent ); + } + + bool operator!=( DisplayEventInfoEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eDisplayEventInfoEXT; + + public: + const void* pNext = nullptr; + DisplayEventTypeEXT displayEvent; + }; + static_assert( sizeof( DisplayEventInfoEXT ) == sizeof( VkDisplayEventInfoEXT ), "struct and wrapper have different size!" 
); + + enum class PeerMemoryFeatureFlagBits + { + eCopySrc = VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT, + eCopySrcKHR = VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT, + eCopyDst = VK_PEER_MEMORY_FEATURE_COPY_DST_BIT, + eCopyDstKHR = VK_PEER_MEMORY_FEATURE_COPY_DST_BIT, + eGenericSrc = VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT, + eGenericSrcKHR = VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT, + eGenericDst = VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT, + eGenericDstKHR = VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT + }; + + using PeerMemoryFeatureFlags = Flags; + + VULKAN_HPP_INLINE PeerMemoryFeatureFlags operator|( PeerMemoryFeatureFlagBits bit0, PeerMemoryFeatureFlagBits bit1 ) + { + return PeerMemoryFeatureFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE PeerMemoryFeatureFlags operator~( PeerMemoryFeatureFlagBits bits ) + { + return ~( PeerMemoryFeatureFlags( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(PeerMemoryFeatureFlagBits::eCopySrc) | VkFlags(PeerMemoryFeatureFlagBits::eCopyDst) | VkFlags(PeerMemoryFeatureFlagBits::eGenericSrc) | VkFlags(PeerMemoryFeatureFlagBits::eGenericDst) + }; + }; + + using PeerMemoryFeatureFlagsKHR = PeerMemoryFeatureFlags; + + enum class MemoryAllocateFlagBits + { + eDeviceMask = VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT, + eDeviceMaskKHR = VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT + }; + + using MemoryAllocateFlags = Flags; + + VULKAN_HPP_INLINE MemoryAllocateFlags operator|( MemoryAllocateFlagBits bit0, MemoryAllocateFlagBits bit1 ) + { + return MemoryAllocateFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE MemoryAllocateFlags operator~( MemoryAllocateFlagBits bits ) + { + return ~( MemoryAllocateFlags( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(MemoryAllocateFlagBits::eDeviceMask) + }; + }; + + using MemoryAllocateFlagsKHR = MemoryAllocateFlags; + + struct MemoryAllocateFlagsInfo + { + MemoryAllocateFlagsInfo( MemoryAllocateFlags flags_ = MemoryAllocateFlags(), + uint32_t deviceMask_ = 0 ) + : flags( flags_ ) + , deviceMask( deviceMask_ ) + { + } + + MemoryAllocateFlagsInfo( VkMemoryAllocateFlagsInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( MemoryAllocateFlagsInfo ) ); + } + + MemoryAllocateFlagsInfo& operator=( VkMemoryAllocateFlagsInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( MemoryAllocateFlagsInfo ) ); + return *this; + } + MemoryAllocateFlagsInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + MemoryAllocateFlagsInfo& setFlags( MemoryAllocateFlags flags_ ) + { + flags = flags_; + return *this; + } + + MemoryAllocateFlagsInfo& setDeviceMask( uint32_t deviceMask_ ) + { + deviceMask = deviceMask_; + return *this; + } + + operator VkMemoryAllocateFlagsInfo const&() const + { + return *reinterpret_cast(this); + } + + operator VkMemoryAllocateFlagsInfo &() + { + return *reinterpret_cast(this); + } + + bool operator==( MemoryAllocateFlagsInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( deviceMask == rhs.deviceMask ); + } + + bool operator!=( MemoryAllocateFlagsInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eMemoryAllocateFlagsInfo; + + public: + const void* pNext = nullptr; + MemoryAllocateFlags flags; + uint32_t deviceMask; + }; + static_assert( sizeof( MemoryAllocateFlagsInfo ) == sizeof( VkMemoryAllocateFlagsInfo ), "struct and wrapper have different size!" 
);
+
+  using MemoryAllocateFlagsInfoKHR = MemoryAllocateFlagsInfo;
+
+  enum class DeviceGroupPresentModeFlagBitsKHR
+  {
+    eLocal = VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_BIT_KHR,
+    eRemote = VK_DEVICE_GROUP_PRESENT_MODE_REMOTE_BIT_KHR,
+    eSum = VK_DEVICE_GROUP_PRESENT_MODE_SUM_BIT_KHR,
+    eLocalMultiDevice = VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_MULTI_DEVICE_BIT_KHR
+  };
+
+  using DeviceGroupPresentModeFlagsKHR = Flags<DeviceGroupPresentModeFlagBitsKHR, VkDeviceGroupPresentModeFlagsKHR>;
+
+  VULKAN_HPP_INLINE DeviceGroupPresentModeFlagsKHR operator|( DeviceGroupPresentModeFlagBitsKHR bit0, DeviceGroupPresentModeFlagBitsKHR bit1 )
+  {
+    return DeviceGroupPresentModeFlagsKHR( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE DeviceGroupPresentModeFlagsKHR operator~( DeviceGroupPresentModeFlagBitsKHR bits )
+  {
+    return ~( DeviceGroupPresentModeFlagsKHR( bits ) );
+  }
+
+  template <> struct FlagTraits<DeviceGroupPresentModeFlagBitsKHR>
+  {
+    enum
+    {
+      allFlags = VkFlags(DeviceGroupPresentModeFlagBitsKHR::eLocal) | VkFlags(DeviceGroupPresentModeFlagBitsKHR::eRemote) | VkFlags(DeviceGroupPresentModeFlagBitsKHR::eSum) | VkFlags(DeviceGroupPresentModeFlagBitsKHR::eLocalMultiDevice)
+    };
+  };
+
+  struct DeviceGroupPresentCapabilitiesKHR
+  {
+    operator VkDeviceGroupPresentCapabilitiesKHR const&() const
+    {
+      return *reinterpret_cast<const VkDeviceGroupPresentCapabilitiesKHR*>(this);
+    }
+
+    operator VkDeviceGroupPresentCapabilitiesKHR &()
+    {
+      return *reinterpret_cast<VkDeviceGroupPresentCapabilitiesKHR*>(this);
+    }
+
+    bool operator==( DeviceGroupPresentCapabilitiesKHR const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( memcmp( presentMask, rhs.presentMask, VK_MAX_DEVICE_GROUP_SIZE * sizeof( uint32_t ) ) == 0 )
+          && ( modes == rhs.modes );
+    }
+
+    bool operator!=( DeviceGroupPresentCapabilitiesKHR const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType = StructureType::eDeviceGroupPresentCapabilitiesKHR;
+
+  public:
+    const void* pNext = nullptr;
+    uint32_t presentMask[VK_MAX_DEVICE_GROUP_SIZE];
+    DeviceGroupPresentModeFlagsKHR modes;
+  };
+  static_assert( sizeof( DeviceGroupPresentCapabilitiesKHR ) == sizeof( VkDeviceGroupPresentCapabilitiesKHR ), "struct and wrapper have different size!"
); + + struct DeviceGroupPresentInfoKHR + { + DeviceGroupPresentInfoKHR( uint32_t swapchainCount_ = 0, + const uint32_t* pDeviceMasks_ = nullptr, + DeviceGroupPresentModeFlagBitsKHR mode_ = DeviceGroupPresentModeFlagBitsKHR::eLocal ) + : swapchainCount( swapchainCount_ ) + , pDeviceMasks( pDeviceMasks_ ) + , mode( mode_ ) + { + } + + DeviceGroupPresentInfoKHR( VkDeviceGroupPresentInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( DeviceGroupPresentInfoKHR ) ); + } + + DeviceGroupPresentInfoKHR& operator=( VkDeviceGroupPresentInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( DeviceGroupPresentInfoKHR ) ); + return *this; + } + DeviceGroupPresentInfoKHR& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + DeviceGroupPresentInfoKHR& setSwapchainCount( uint32_t swapchainCount_ ) + { + swapchainCount = swapchainCount_; + return *this; + } + + DeviceGroupPresentInfoKHR& setPDeviceMasks( const uint32_t* pDeviceMasks_ ) + { + pDeviceMasks = pDeviceMasks_; + return *this; + } + + DeviceGroupPresentInfoKHR& setMode( DeviceGroupPresentModeFlagBitsKHR mode_ ) + { + mode = mode_; + return *this; + } + + operator VkDeviceGroupPresentInfoKHR const&() const + { + return *reinterpret_cast(this); + } + + operator VkDeviceGroupPresentInfoKHR &() + { + return *reinterpret_cast(this); + } + + bool operator==( DeviceGroupPresentInfoKHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( swapchainCount == rhs.swapchainCount ) + && ( pDeviceMasks == rhs.pDeviceMasks ) + && ( mode == rhs.mode ); + } + + bool operator!=( DeviceGroupPresentInfoKHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eDeviceGroupPresentInfoKHR; + + public: + const void* pNext = nullptr; + uint32_t swapchainCount; + const uint32_t* pDeviceMasks; + DeviceGroupPresentModeFlagBitsKHR mode; + }; + static_assert( sizeof( DeviceGroupPresentInfoKHR ) == sizeof( VkDeviceGroupPresentInfoKHR ), "struct and wrapper have different size!" 
); + + struct DeviceGroupSwapchainCreateInfoKHR + { + DeviceGroupSwapchainCreateInfoKHR( DeviceGroupPresentModeFlagsKHR modes_ = DeviceGroupPresentModeFlagsKHR() ) + : modes( modes_ ) + { + } + + DeviceGroupSwapchainCreateInfoKHR( VkDeviceGroupSwapchainCreateInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( DeviceGroupSwapchainCreateInfoKHR ) ); + } + + DeviceGroupSwapchainCreateInfoKHR& operator=( VkDeviceGroupSwapchainCreateInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( DeviceGroupSwapchainCreateInfoKHR ) ); + return *this; + } + DeviceGroupSwapchainCreateInfoKHR& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + DeviceGroupSwapchainCreateInfoKHR& setModes( DeviceGroupPresentModeFlagsKHR modes_ ) + { + modes = modes_; + return *this; + } + + operator VkDeviceGroupSwapchainCreateInfoKHR const&() const + { + return *reinterpret_cast(this); + } + + operator VkDeviceGroupSwapchainCreateInfoKHR &() + { + return *reinterpret_cast(this); + } + + bool operator==( DeviceGroupSwapchainCreateInfoKHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( modes == rhs.modes ); + } + + bool operator!=( DeviceGroupSwapchainCreateInfoKHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eDeviceGroupSwapchainCreateInfoKHR; + + public: + const void* pNext = nullptr; + DeviceGroupPresentModeFlagsKHR modes; + }; + static_assert( sizeof( DeviceGroupSwapchainCreateInfoKHR ) == sizeof( VkDeviceGroupSwapchainCreateInfoKHR ), "struct and wrapper have different size!" ); + + enum class SwapchainCreateFlagBitsKHR + { + eSplitInstanceBindRegions = VK_SWAPCHAIN_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR, + eProtected = VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR + }; + + using SwapchainCreateFlagsKHR = Flags; + + VULKAN_HPP_INLINE SwapchainCreateFlagsKHR operator|( SwapchainCreateFlagBitsKHR bit0, SwapchainCreateFlagBitsKHR bit1 ) + { + return SwapchainCreateFlagsKHR( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE SwapchainCreateFlagsKHR operator~( SwapchainCreateFlagBitsKHR bits ) + { + return ~( SwapchainCreateFlagsKHR( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(SwapchainCreateFlagBitsKHR::eSplitInstanceBindRegions) | VkFlags(SwapchainCreateFlagBitsKHR::eProtected) + }; + }; + + struct SwapchainCreateInfoKHR + { + SwapchainCreateInfoKHR( SwapchainCreateFlagsKHR flags_ = SwapchainCreateFlagsKHR(), + SurfaceKHR surface_ = SurfaceKHR(), + uint32_t minImageCount_ = 0, + Format imageFormat_ = Format::eUndefined, + ColorSpaceKHR imageColorSpace_ = ColorSpaceKHR::eSrgbNonlinear, + Extent2D imageExtent_ = Extent2D(), + uint32_t imageArrayLayers_ = 0, + ImageUsageFlags imageUsage_ = ImageUsageFlags(), + SharingMode imageSharingMode_ = SharingMode::eExclusive, + uint32_t queueFamilyIndexCount_ = 0, + const uint32_t* pQueueFamilyIndices_ = nullptr, + SurfaceTransformFlagBitsKHR preTransform_ = SurfaceTransformFlagBitsKHR::eIdentity, + CompositeAlphaFlagBitsKHR compositeAlpha_ = CompositeAlphaFlagBitsKHR::eOpaque, + PresentModeKHR presentMode_ = PresentModeKHR::eImmediate, + Bool32 clipped_ = 0, + SwapchainKHR oldSwapchain_ = SwapchainKHR() ) + : flags( flags_ ) + , surface( surface_ ) + , minImageCount( minImageCount_ ) + , imageFormat( imageFormat_ ) + , imageColorSpace( imageColorSpace_ ) + , imageExtent( imageExtent_ ) + , imageArrayLayers( imageArrayLayers_ ) + , imageUsage( imageUsage_ ) + , imageSharingMode( imageSharingMode_ ) + , queueFamilyIndexCount( 
queueFamilyIndexCount_ ) + , pQueueFamilyIndices( pQueueFamilyIndices_ ) + , preTransform( preTransform_ ) + , compositeAlpha( compositeAlpha_ ) + , presentMode( presentMode_ ) + , clipped( clipped_ ) + , oldSwapchain( oldSwapchain_ ) + { + } + + SwapchainCreateInfoKHR( VkSwapchainCreateInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( SwapchainCreateInfoKHR ) ); + } + + SwapchainCreateInfoKHR& operator=( VkSwapchainCreateInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( SwapchainCreateInfoKHR ) ); + return *this; + } + SwapchainCreateInfoKHR& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + SwapchainCreateInfoKHR& setFlags( SwapchainCreateFlagsKHR flags_ ) + { + flags = flags_; + return *this; + } + + SwapchainCreateInfoKHR& setSurface( SurfaceKHR surface_ ) + { + surface = surface_; + return *this; + } + + SwapchainCreateInfoKHR& setMinImageCount( uint32_t minImageCount_ ) + { + minImageCount = minImageCount_; + return *this; + } + + SwapchainCreateInfoKHR& setImageFormat( Format imageFormat_ ) + { + imageFormat = imageFormat_; + return *this; + } + + SwapchainCreateInfoKHR& setImageColorSpace( ColorSpaceKHR imageColorSpace_ ) + { + imageColorSpace = imageColorSpace_; + return *this; + } + + SwapchainCreateInfoKHR& setImageExtent( Extent2D imageExtent_ ) + { + imageExtent = imageExtent_; + return *this; + } + + SwapchainCreateInfoKHR& setImageArrayLayers( uint32_t imageArrayLayers_ ) + { + imageArrayLayers = imageArrayLayers_; + return *this; + } + + SwapchainCreateInfoKHR& setImageUsage( ImageUsageFlags imageUsage_ ) + { + imageUsage = imageUsage_; + return *this; + } + + SwapchainCreateInfoKHR& setImageSharingMode( SharingMode imageSharingMode_ ) + { + imageSharingMode = imageSharingMode_; + return *this; + } + + SwapchainCreateInfoKHR& setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) + { + queueFamilyIndexCount = queueFamilyIndexCount_; + return *this; + } + + SwapchainCreateInfoKHR& setPQueueFamilyIndices( const uint32_t* pQueueFamilyIndices_ ) + { + pQueueFamilyIndices = pQueueFamilyIndices_; + return *this; + } + + SwapchainCreateInfoKHR& setPreTransform( SurfaceTransformFlagBitsKHR preTransform_ ) + { + preTransform = preTransform_; + return *this; + } + + SwapchainCreateInfoKHR& setCompositeAlpha( CompositeAlphaFlagBitsKHR compositeAlpha_ ) + { + compositeAlpha = compositeAlpha_; + return *this; + } + + SwapchainCreateInfoKHR& setPresentMode( PresentModeKHR presentMode_ ) + { + presentMode = presentMode_; + return *this; + } + + SwapchainCreateInfoKHR& setClipped( Bool32 clipped_ ) + { + clipped = clipped_; + return *this; + } + + SwapchainCreateInfoKHR& setOldSwapchain( SwapchainKHR oldSwapchain_ ) + { + oldSwapchain = oldSwapchain_; + return *this; + } + + operator VkSwapchainCreateInfoKHR const&() const + { + return *reinterpret_cast(this); + } + + operator VkSwapchainCreateInfoKHR &() + { + return *reinterpret_cast(this); + } + + bool operator==( SwapchainCreateInfoKHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( surface == rhs.surface ) + && ( minImageCount == rhs.minImageCount ) + && ( imageFormat == rhs.imageFormat ) + && ( imageColorSpace == rhs.imageColorSpace ) + && ( imageExtent == rhs.imageExtent ) + && ( imageArrayLayers == rhs.imageArrayLayers ) + && ( imageUsage == rhs.imageUsage ) + && ( imageSharingMode == rhs.imageSharingMode ) + && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount ) + && ( pQueueFamilyIndices == rhs.pQueueFamilyIndices 
) + && ( preTransform == rhs.preTransform ) + && ( compositeAlpha == rhs.compositeAlpha ) + && ( presentMode == rhs.presentMode ) + && ( clipped == rhs.clipped ) + && ( oldSwapchain == rhs.oldSwapchain ); + } + + bool operator!=( SwapchainCreateInfoKHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eSwapchainCreateInfoKHR; + + public: + const void* pNext = nullptr; + SwapchainCreateFlagsKHR flags; + SurfaceKHR surface; + uint32_t minImageCount; + Format imageFormat; + ColorSpaceKHR imageColorSpace; + Extent2D imageExtent; + uint32_t imageArrayLayers; + ImageUsageFlags imageUsage; + SharingMode imageSharingMode; + uint32_t queueFamilyIndexCount; + const uint32_t* pQueueFamilyIndices; + SurfaceTransformFlagBitsKHR preTransform; + CompositeAlphaFlagBitsKHR compositeAlpha; + PresentModeKHR presentMode; + Bool32 clipped; + SwapchainKHR oldSwapchain; + }; + static_assert( sizeof( SwapchainCreateInfoKHR ) == sizeof( VkSwapchainCreateInfoKHR ), "struct and wrapper have different size!" ); + + enum class ViewportCoordinateSwizzleNV + { + ePositiveX = VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_X_NV, + eNegativeX = VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_X_NV, + ePositiveY = VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Y_NV, + eNegativeY = VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Y_NV, + ePositiveZ = VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Z_NV, + eNegativeZ = VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Z_NV, + ePositiveW = VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_W_NV, + eNegativeW = VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_W_NV + }; + + struct ViewportSwizzleNV + { + ViewportSwizzleNV( ViewportCoordinateSwizzleNV x_ = ViewportCoordinateSwizzleNV::ePositiveX, + ViewportCoordinateSwizzleNV y_ = ViewportCoordinateSwizzleNV::ePositiveX, + ViewportCoordinateSwizzleNV z_ = ViewportCoordinateSwizzleNV::ePositiveX, + ViewportCoordinateSwizzleNV w_ = ViewportCoordinateSwizzleNV::ePositiveX ) + : x( x_ ) + , y( y_ ) + , z( z_ ) + , w( w_ ) + { + } + + ViewportSwizzleNV( VkViewportSwizzleNV const & rhs ) + { + memcpy( this, &rhs, sizeof( ViewportSwizzleNV ) ); + } + + ViewportSwizzleNV& operator=( VkViewportSwizzleNV const & rhs ) + { + memcpy( this, &rhs, sizeof( ViewportSwizzleNV ) ); + return *this; + } + ViewportSwizzleNV& setX( ViewportCoordinateSwizzleNV x_ ) + { + x = x_; + return *this; + } + + ViewportSwizzleNV& setY( ViewportCoordinateSwizzleNV y_ ) + { + y = y_; + return *this; + } + + ViewportSwizzleNV& setZ( ViewportCoordinateSwizzleNV z_ ) + { + z = z_; + return *this; + } + + ViewportSwizzleNV& setW( ViewportCoordinateSwizzleNV w_ ) + { + w = w_; + return *this; + } + + operator VkViewportSwizzleNV const&() const + { + return *reinterpret_cast(this); + } + + operator VkViewportSwizzleNV &() + { + return *reinterpret_cast(this); + } + + bool operator==( ViewportSwizzleNV const& rhs ) const + { + return ( x == rhs.x ) + && ( y == rhs.y ) + && ( z == rhs.z ) + && ( w == rhs.w ); + } + + bool operator!=( ViewportSwizzleNV const& rhs ) const + { + return !operator==( rhs ); + } + + ViewportCoordinateSwizzleNV x; + ViewportCoordinateSwizzleNV y; + ViewportCoordinateSwizzleNV z; + ViewportCoordinateSwizzleNV w; + }; + static_assert( sizeof( ViewportSwizzleNV ) == sizeof( VkViewportSwizzleNV ), "struct and wrapper have different size!" 
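+  // NOTE (illustrative, not part of the upstream header): the fluent set*() members of
+  // SwapchainCreateInfoKHR defined earlier in this block are typically chained before
+  // handing the struct to the swapchain-creation wrapper; `device` and `surface` are
+  // assumed to be valid handles obtained elsewhere, and the wrapper name follows the
+  // usual vulkan.hpp convention:
+  //
+  //   vk::SwapchainCreateInfoKHR createInfo;
+  //   createInfo.setSurface( surface )
+  //             .setMinImageCount( 3 )
+  //             .setImageFormat( vk::Format::eB8G8R8A8Unorm )
+  //             .setImageExtent( vk::Extent2D( 1280, 720 ) )
+  //             .setImageArrayLayers( 1 )
+  //             .setImageUsage( vk::ImageUsageFlagBits::eColorAttachment )
+  //             .setPresentMode( vk::PresentModeKHR::eFifo );
+  //   vk::SwapchainKHR swapchain = device.createSwapchainKHR( createInfo );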
); + + struct PipelineViewportSwizzleStateCreateInfoNV + { + PipelineViewportSwizzleStateCreateInfoNV( PipelineViewportSwizzleStateCreateFlagsNV flags_ = PipelineViewportSwizzleStateCreateFlagsNV(), + uint32_t viewportCount_ = 0, + const ViewportSwizzleNV* pViewportSwizzles_ = nullptr ) + : flags( flags_ ) + , viewportCount( viewportCount_ ) + , pViewportSwizzles( pViewportSwizzles_ ) + { + } + + PipelineViewportSwizzleStateCreateInfoNV( VkPipelineViewportSwizzleStateCreateInfoNV const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineViewportSwizzleStateCreateInfoNV ) ); + } + + PipelineViewportSwizzleStateCreateInfoNV& operator=( VkPipelineViewportSwizzleStateCreateInfoNV const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineViewportSwizzleStateCreateInfoNV ) ); + return *this; + } + PipelineViewportSwizzleStateCreateInfoNV& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PipelineViewportSwizzleStateCreateInfoNV& setFlags( PipelineViewportSwizzleStateCreateFlagsNV flags_ ) + { + flags = flags_; + return *this; + } + + PipelineViewportSwizzleStateCreateInfoNV& setViewportCount( uint32_t viewportCount_ ) + { + viewportCount = viewportCount_; + return *this; + } + + PipelineViewportSwizzleStateCreateInfoNV& setPViewportSwizzles( const ViewportSwizzleNV* pViewportSwizzles_ ) + { + pViewportSwizzles = pViewportSwizzles_; + return *this; + } + + operator VkPipelineViewportSwizzleStateCreateInfoNV const&() const + { + return *reinterpret_cast(this); + } + + operator VkPipelineViewportSwizzleStateCreateInfoNV &() + { + return *reinterpret_cast(this); + } + + bool operator==( PipelineViewportSwizzleStateCreateInfoNV const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( viewportCount == rhs.viewportCount ) + && ( pViewportSwizzles == rhs.pViewportSwizzles ); + } + + bool operator!=( PipelineViewportSwizzleStateCreateInfoNV const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePipelineViewportSwizzleStateCreateInfoNV; + + public: + const void* pNext = nullptr; + PipelineViewportSwizzleStateCreateFlagsNV flags; + uint32_t viewportCount; + const ViewportSwizzleNV* pViewportSwizzles; + }; + static_assert( sizeof( PipelineViewportSwizzleStateCreateInfoNV ) == sizeof( VkPipelineViewportSwizzleStateCreateInfoNV ), "struct and wrapper have different size!" 
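+  // NOTE (illustrative, not part of the upstream header): PipelineViewportSwizzleStateCreateInfoNV
+  // (VK_NV_viewport_swizzle) extends PipelineViewportStateCreateInfo through its pNext
+  // chain, with one ViewportSwizzleNV entry per viewport; `viewportStateCreateInfo` is
+  // an assumed vk::PipelineViewportStateCreateInfo built elsewhere:
+  //
+  //   vk::ViewportSwizzleNV swizzle( vk::ViewportCoordinateSwizzleNV::ePositiveX,
+  //                                  vk::ViewportCoordinateSwizzleNV::ePositiveY,
+  //                                  vk::ViewportCoordinateSwizzleNV::ePositiveZ,
+  //                                  vk::ViewportCoordinateSwizzleNV::ePositiveW );
+  //   vk::PipelineViewportSwizzleStateCreateInfoNV swizzleState;
+  //   swizzleState.setViewportCount( 1 )
+  //               .setPViewportSwizzles( &swizzle );
+  //   viewportStateCreateInfo.setPNext( &swizzleState );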
); + + enum class DiscardRectangleModeEXT + { + eInclusive = VK_DISCARD_RECTANGLE_MODE_INCLUSIVE_EXT, + eExclusive = VK_DISCARD_RECTANGLE_MODE_EXCLUSIVE_EXT + }; + + struct PipelineDiscardRectangleStateCreateInfoEXT + { + PipelineDiscardRectangleStateCreateInfoEXT( PipelineDiscardRectangleStateCreateFlagsEXT flags_ = PipelineDiscardRectangleStateCreateFlagsEXT(), + DiscardRectangleModeEXT discardRectangleMode_ = DiscardRectangleModeEXT::eInclusive, + uint32_t discardRectangleCount_ = 0, + const Rect2D* pDiscardRectangles_ = nullptr ) + : flags( flags_ ) + , discardRectangleMode( discardRectangleMode_ ) + , discardRectangleCount( discardRectangleCount_ ) + , pDiscardRectangles( pDiscardRectangles_ ) + { + } + + PipelineDiscardRectangleStateCreateInfoEXT( VkPipelineDiscardRectangleStateCreateInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineDiscardRectangleStateCreateInfoEXT ) ); + } + + PipelineDiscardRectangleStateCreateInfoEXT& operator=( VkPipelineDiscardRectangleStateCreateInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineDiscardRectangleStateCreateInfoEXT ) ); + return *this; + } + PipelineDiscardRectangleStateCreateInfoEXT& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PipelineDiscardRectangleStateCreateInfoEXT& setFlags( PipelineDiscardRectangleStateCreateFlagsEXT flags_ ) + { + flags = flags_; + return *this; + } + + PipelineDiscardRectangleStateCreateInfoEXT& setDiscardRectangleMode( DiscardRectangleModeEXT discardRectangleMode_ ) + { + discardRectangleMode = discardRectangleMode_; + return *this; + } + + PipelineDiscardRectangleStateCreateInfoEXT& setDiscardRectangleCount( uint32_t discardRectangleCount_ ) + { + discardRectangleCount = discardRectangleCount_; + return *this; + } + + PipelineDiscardRectangleStateCreateInfoEXT& setPDiscardRectangles( const Rect2D* pDiscardRectangles_ ) + { + pDiscardRectangles = pDiscardRectangles_; + return *this; + } + + operator VkPipelineDiscardRectangleStateCreateInfoEXT const&() const + { + return *reinterpret_cast(this); + } + + operator VkPipelineDiscardRectangleStateCreateInfoEXT &() + { + return *reinterpret_cast(this); + } + + bool operator==( PipelineDiscardRectangleStateCreateInfoEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( discardRectangleMode == rhs.discardRectangleMode ) + && ( discardRectangleCount == rhs.discardRectangleCount ) + && ( pDiscardRectangles == rhs.pDiscardRectangles ); + } + + bool operator!=( PipelineDiscardRectangleStateCreateInfoEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePipelineDiscardRectangleStateCreateInfoEXT; + + public: + const void* pNext = nullptr; + PipelineDiscardRectangleStateCreateFlagsEXT flags; + DiscardRectangleModeEXT discardRectangleMode; + uint32_t discardRectangleCount; + const Rect2D* pDiscardRectangles; + }; + static_assert( sizeof( PipelineDiscardRectangleStateCreateInfoEXT ) == sizeof( VkPipelineDiscardRectangleStateCreateInfoEXT ), "struct and wrapper have different size!" 
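+  // NOTE (illustrative, not part of the upstream header): like the other Pipeline*
+  // extension structs in this file, PipelineDiscardRectangleStateCreateInfoEXT is
+  // chained into GraphicsPipelineCreateInfo::pNext rather than passed on its own;
+  // `graphicsPipelineCreateInfo` is an assumed vk::GraphicsPipelineCreateInfo:
+  //
+  //   vk::Rect2D discardRect( vk::Offset2D( 0, 0 ), vk::Extent2D( 256, 256 ) );
+  //   vk::PipelineDiscardRectangleStateCreateInfoEXT discardState;
+  //   discardState.setDiscardRectangleMode( vk::DiscardRectangleModeEXT::eInclusive )
+  //               .setDiscardRectangleCount( 1 )
+  //               .setPDiscardRectangles( &discardRect );
+  //   graphicsPipelineCreateInfo.setPNext( &discardState );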
); + + enum class SubpassDescriptionFlagBits + { + ePerViewAttributesNVX = VK_SUBPASS_DESCRIPTION_PER_VIEW_ATTRIBUTES_BIT_NVX, + ePerViewPositionXOnlyNVX = VK_SUBPASS_DESCRIPTION_PER_VIEW_POSITION_X_ONLY_BIT_NVX + }; + + using SubpassDescriptionFlags = Flags; + + VULKAN_HPP_INLINE SubpassDescriptionFlags operator|( SubpassDescriptionFlagBits bit0, SubpassDescriptionFlagBits bit1 ) + { + return SubpassDescriptionFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE SubpassDescriptionFlags operator~( SubpassDescriptionFlagBits bits ) + { + return ~( SubpassDescriptionFlags( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(SubpassDescriptionFlagBits::ePerViewAttributesNVX) | VkFlags(SubpassDescriptionFlagBits::ePerViewPositionXOnlyNVX) + }; + }; + + struct SubpassDescription + { + SubpassDescription( SubpassDescriptionFlags flags_ = SubpassDescriptionFlags(), + PipelineBindPoint pipelineBindPoint_ = PipelineBindPoint::eGraphics, + uint32_t inputAttachmentCount_ = 0, + const AttachmentReference* pInputAttachments_ = nullptr, + uint32_t colorAttachmentCount_ = 0, + const AttachmentReference* pColorAttachments_ = nullptr, + const AttachmentReference* pResolveAttachments_ = nullptr, + const AttachmentReference* pDepthStencilAttachment_ = nullptr, + uint32_t preserveAttachmentCount_ = 0, + const uint32_t* pPreserveAttachments_ = nullptr ) + : flags( flags_ ) + , pipelineBindPoint( pipelineBindPoint_ ) + , inputAttachmentCount( inputAttachmentCount_ ) + , pInputAttachments( pInputAttachments_ ) + , colorAttachmentCount( colorAttachmentCount_ ) + , pColorAttachments( pColorAttachments_ ) + , pResolveAttachments( pResolveAttachments_ ) + , pDepthStencilAttachment( pDepthStencilAttachment_ ) + , preserveAttachmentCount( preserveAttachmentCount_ ) + , pPreserveAttachments( pPreserveAttachments_ ) + { + } + + SubpassDescription( VkSubpassDescription const & rhs ) + { + memcpy( this, &rhs, sizeof( SubpassDescription ) ); + } + + SubpassDescription& operator=( VkSubpassDescription const & rhs ) + { + memcpy( this, &rhs, sizeof( SubpassDescription ) ); + return *this; + } + SubpassDescription& setFlags( SubpassDescriptionFlags flags_ ) + { + flags = flags_; + return *this; + } + + SubpassDescription& setPipelineBindPoint( PipelineBindPoint pipelineBindPoint_ ) + { + pipelineBindPoint = pipelineBindPoint_; + return *this; + } + + SubpassDescription& setInputAttachmentCount( uint32_t inputAttachmentCount_ ) + { + inputAttachmentCount = inputAttachmentCount_; + return *this; + } + + SubpassDescription& setPInputAttachments( const AttachmentReference* pInputAttachments_ ) + { + pInputAttachments = pInputAttachments_; + return *this; + } + + SubpassDescription& setColorAttachmentCount( uint32_t colorAttachmentCount_ ) + { + colorAttachmentCount = colorAttachmentCount_; + return *this; + } + + SubpassDescription& setPColorAttachments( const AttachmentReference* pColorAttachments_ ) + { + pColorAttachments = pColorAttachments_; + return *this; + } + + SubpassDescription& setPResolveAttachments( const AttachmentReference* pResolveAttachments_ ) + { + pResolveAttachments = pResolveAttachments_; + return *this; + } + + SubpassDescription& setPDepthStencilAttachment( const AttachmentReference* pDepthStencilAttachment_ ) + { + pDepthStencilAttachment = pDepthStencilAttachment_; + return *this; + } + + SubpassDescription& setPreserveAttachmentCount( uint32_t preserveAttachmentCount_ ) + { + preserveAttachmentCount = preserveAttachmentCount_; + return *this; + } + + SubpassDescription& 
setPPreserveAttachments( const uint32_t* pPreserveAttachments_ ) + { + pPreserveAttachments = pPreserveAttachments_; + return *this; + } + + operator VkSubpassDescription const&() const + { + return *reinterpret_cast(this); + } + + operator VkSubpassDescription &() + { + return *reinterpret_cast(this); + } + + bool operator==( SubpassDescription const& rhs ) const + { + return ( flags == rhs.flags ) + && ( pipelineBindPoint == rhs.pipelineBindPoint ) + && ( inputAttachmentCount == rhs.inputAttachmentCount ) + && ( pInputAttachments == rhs.pInputAttachments ) + && ( colorAttachmentCount == rhs.colorAttachmentCount ) + && ( pColorAttachments == rhs.pColorAttachments ) + && ( pResolveAttachments == rhs.pResolveAttachments ) + && ( pDepthStencilAttachment == rhs.pDepthStencilAttachment ) + && ( preserveAttachmentCount == rhs.preserveAttachmentCount ) + && ( pPreserveAttachments == rhs.pPreserveAttachments ); + } + + bool operator!=( SubpassDescription const& rhs ) const + { + return !operator==( rhs ); + } + + SubpassDescriptionFlags flags; + PipelineBindPoint pipelineBindPoint; + uint32_t inputAttachmentCount; + const AttachmentReference* pInputAttachments; + uint32_t colorAttachmentCount; + const AttachmentReference* pColorAttachments; + const AttachmentReference* pResolveAttachments; + const AttachmentReference* pDepthStencilAttachment; + uint32_t preserveAttachmentCount; + const uint32_t* pPreserveAttachments; + }; + static_assert( sizeof( SubpassDescription ) == sizeof( VkSubpassDescription ), "struct and wrapper have different size!" ); + + struct RenderPassCreateInfo + { + RenderPassCreateInfo( RenderPassCreateFlags flags_ = RenderPassCreateFlags(), + uint32_t attachmentCount_ = 0, + const AttachmentDescription* pAttachments_ = nullptr, + uint32_t subpassCount_ = 0, + const SubpassDescription* pSubpasses_ = nullptr, + uint32_t dependencyCount_ = 0, + const SubpassDependency* pDependencies_ = nullptr ) + : flags( flags_ ) + , attachmentCount( attachmentCount_ ) + , pAttachments( pAttachments_ ) + , subpassCount( subpassCount_ ) + , pSubpasses( pSubpasses_ ) + , dependencyCount( dependencyCount_ ) + , pDependencies( pDependencies_ ) + { + } + + RenderPassCreateInfo( VkRenderPassCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( RenderPassCreateInfo ) ); + } + + RenderPassCreateInfo& operator=( VkRenderPassCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( RenderPassCreateInfo ) ); + return *this; + } + RenderPassCreateInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + RenderPassCreateInfo& setFlags( RenderPassCreateFlags flags_ ) + { + flags = flags_; + return *this; + } + + RenderPassCreateInfo& setAttachmentCount( uint32_t attachmentCount_ ) + { + attachmentCount = attachmentCount_; + return *this; + } + + RenderPassCreateInfo& setPAttachments( const AttachmentDescription* pAttachments_ ) + { + pAttachments = pAttachments_; + return *this; + } + + RenderPassCreateInfo& setSubpassCount( uint32_t subpassCount_ ) + { + subpassCount = subpassCount_; + return *this; + } + + RenderPassCreateInfo& setPSubpasses( const SubpassDescription* pSubpasses_ ) + { + pSubpasses = pSubpasses_; + return *this; + } + + RenderPassCreateInfo& setDependencyCount( uint32_t dependencyCount_ ) + { + dependencyCount = dependencyCount_; + return *this; + } + + RenderPassCreateInfo& setPDependencies( const SubpassDependency* pDependencies_ ) + { + pDependencies = pDependencies_; + return *this; + } + + operator VkRenderPassCreateInfo const&() const + { + return 
*reinterpret_cast(this); + } + + operator VkRenderPassCreateInfo &() + { + return *reinterpret_cast(this); + } + + bool operator==( RenderPassCreateInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( attachmentCount == rhs.attachmentCount ) + && ( pAttachments == rhs.pAttachments ) + && ( subpassCount == rhs.subpassCount ) + && ( pSubpasses == rhs.pSubpasses ) + && ( dependencyCount == rhs.dependencyCount ) + && ( pDependencies == rhs.pDependencies ); + } + + bool operator!=( RenderPassCreateInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eRenderPassCreateInfo; + + public: + const void* pNext = nullptr; + RenderPassCreateFlags flags; + uint32_t attachmentCount; + const AttachmentDescription* pAttachments; + uint32_t subpassCount; + const SubpassDescription* pSubpasses; + uint32_t dependencyCount; + const SubpassDependency* pDependencies; + }; + static_assert( sizeof( RenderPassCreateInfo ) == sizeof( VkRenderPassCreateInfo ), "struct and wrapper have different size!" ); + + struct SubpassDescription2KHR + { + SubpassDescription2KHR( SubpassDescriptionFlags flags_ = SubpassDescriptionFlags(), + PipelineBindPoint pipelineBindPoint_ = PipelineBindPoint::eGraphics, + uint32_t viewMask_ = 0, + uint32_t inputAttachmentCount_ = 0, + const AttachmentReference2KHR* pInputAttachments_ = nullptr, + uint32_t colorAttachmentCount_ = 0, + const AttachmentReference2KHR* pColorAttachments_ = nullptr, + const AttachmentReference2KHR* pResolveAttachments_ = nullptr, + const AttachmentReference2KHR* pDepthStencilAttachment_ = nullptr, + uint32_t preserveAttachmentCount_ = 0, + const uint32_t* pPreserveAttachments_ = nullptr ) + : flags( flags_ ) + , pipelineBindPoint( pipelineBindPoint_ ) + , viewMask( viewMask_ ) + , inputAttachmentCount( inputAttachmentCount_ ) + , pInputAttachments( pInputAttachments_ ) + , colorAttachmentCount( colorAttachmentCount_ ) + , pColorAttachments( pColorAttachments_ ) + , pResolveAttachments( pResolveAttachments_ ) + , pDepthStencilAttachment( pDepthStencilAttachment_ ) + , preserveAttachmentCount( preserveAttachmentCount_ ) + , pPreserveAttachments( pPreserveAttachments_ ) + { + } + + SubpassDescription2KHR( VkSubpassDescription2KHR const & rhs ) + { + memcpy( this, &rhs, sizeof( SubpassDescription2KHR ) ); + } + + SubpassDescription2KHR& operator=( VkSubpassDescription2KHR const & rhs ) + { + memcpy( this, &rhs, sizeof( SubpassDescription2KHR ) ); + return *this; + } + SubpassDescription2KHR& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + SubpassDescription2KHR& setFlags( SubpassDescriptionFlags flags_ ) + { + flags = flags_; + return *this; + } + + SubpassDescription2KHR& setPipelineBindPoint( PipelineBindPoint pipelineBindPoint_ ) + { + pipelineBindPoint = pipelineBindPoint_; + return *this; + } + + SubpassDescription2KHR& setViewMask( uint32_t viewMask_ ) + { + viewMask = viewMask_; + return *this; + } + + SubpassDescription2KHR& setInputAttachmentCount( uint32_t inputAttachmentCount_ ) + { + inputAttachmentCount = inputAttachmentCount_; + return *this; + } + + SubpassDescription2KHR& setPInputAttachments( const AttachmentReference2KHR* pInputAttachments_ ) + { + pInputAttachments = pInputAttachments_; + return *this; + } + + SubpassDescription2KHR& setColorAttachmentCount( uint32_t colorAttachmentCount_ ) + { + colorAttachmentCount = colorAttachmentCount_; + return *this; + } + + SubpassDescription2KHR& 
setPColorAttachments( const AttachmentReference2KHR* pColorAttachments_ ) + { + pColorAttachments = pColorAttachments_; + return *this; + } + + SubpassDescription2KHR& setPResolveAttachments( const AttachmentReference2KHR* pResolveAttachments_ ) + { + pResolveAttachments = pResolveAttachments_; + return *this; + } + + SubpassDescription2KHR& setPDepthStencilAttachment( const AttachmentReference2KHR* pDepthStencilAttachment_ ) + { + pDepthStencilAttachment = pDepthStencilAttachment_; + return *this; + } + + SubpassDescription2KHR& setPreserveAttachmentCount( uint32_t preserveAttachmentCount_ ) + { + preserveAttachmentCount = preserveAttachmentCount_; + return *this; + } + + SubpassDescription2KHR& setPPreserveAttachments( const uint32_t* pPreserveAttachments_ ) + { + pPreserveAttachments = pPreserveAttachments_; + return *this; + } + + operator VkSubpassDescription2KHR const&() const + { + return *reinterpret_cast(this); + } + + operator VkSubpassDescription2KHR &() + { + return *reinterpret_cast(this); + } + + bool operator==( SubpassDescription2KHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( pipelineBindPoint == rhs.pipelineBindPoint ) + && ( viewMask == rhs.viewMask ) + && ( inputAttachmentCount == rhs.inputAttachmentCount ) + && ( pInputAttachments == rhs.pInputAttachments ) + && ( colorAttachmentCount == rhs.colorAttachmentCount ) + && ( pColorAttachments == rhs.pColorAttachments ) + && ( pResolveAttachments == rhs.pResolveAttachments ) + && ( pDepthStencilAttachment == rhs.pDepthStencilAttachment ) + && ( preserveAttachmentCount == rhs.preserveAttachmentCount ) + && ( pPreserveAttachments == rhs.pPreserveAttachments ); + } + + bool operator!=( SubpassDescription2KHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eSubpassDescription2KHR; + + public: + const void* pNext = nullptr; + SubpassDescriptionFlags flags; + PipelineBindPoint pipelineBindPoint; + uint32_t viewMask; + uint32_t inputAttachmentCount; + const AttachmentReference2KHR* pInputAttachments; + uint32_t colorAttachmentCount; + const AttachmentReference2KHR* pColorAttachments; + const AttachmentReference2KHR* pResolveAttachments; + const AttachmentReference2KHR* pDepthStencilAttachment; + uint32_t preserveAttachmentCount; + const uint32_t* pPreserveAttachments; + }; + static_assert( sizeof( SubpassDescription2KHR ) == sizeof( VkSubpassDescription2KHR ), "struct and wrapper have different size!" 
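+  // NOTE (illustrative, not part of the upstream header): SubpassDescription and
+  // RenderPassCreateInfo above (and their 2KHR variants from VK_KHR_create_renderpass2)
+  // are populated the same way. A minimal single-subpass sketch using the core structs;
+  // `colorAttachmentDescription` and `device` are assumed to exist elsewhere:
+  //
+  //   vk::AttachmentReference colorRef( 0, vk::ImageLayout::eColorAttachmentOptimal );
+  //   vk::SubpassDescription subpass;
+  //   subpass.setColorAttachmentCount( 1 )
+  //          .setPColorAttachments( &colorRef );
+  //   vk::RenderPassCreateInfo renderPassInfo;
+  //   renderPassInfo.setAttachmentCount( 1 )
+  //                 .setPAttachments( &colorAttachmentDescription )
+  //                 .setSubpassCount( 1 )
+  //                 .setPSubpasses( &subpass );
+  //   vk::RenderPass renderPass = device.createRenderPass( renderPassInfo );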
); + + struct RenderPassCreateInfo2KHR + { + RenderPassCreateInfo2KHR( RenderPassCreateFlags flags_ = RenderPassCreateFlags(), + uint32_t attachmentCount_ = 0, + const AttachmentDescription2KHR* pAttachments_ = nullptr, + uint32_t subpassCount_ = 0, + const SubpassDescription2KHR* pSubpasses_ = nullptr, + uint32_t dependencyCount_ = 0, + const SubpassDependency2KHR* pDependencies_ = nullptr, + uint32_t correlatedViewMaskCount_ = 0, + const uint32_t* pCorrelatedViewMasks_ = nullptr ) + : flags( flags_ ) + , attachmentCount( attachmentCount_ ) + , pAttachments( pAttachments_ ) + , subpassCount( subpassCount_ ) + , pSubpasses( pSubpasses_ ) + , dependencyCount( dependencyCount_ ) + , pDependencies( pDependencies_ ) + , correlatedViewMaskCount( correlatedViewMaskCount_ ) + , pCorrelatedViewMasks( pCorrelatedViewMasks_ ) + { + } + + RenderPassCreateInfo2KHR( VkRenderPassCreateInfo2KHR const & rhs ) + { + memcpy( this, &rhs, sizeof( RenderPassCreateInfo2KHR ) ); + } + + RenderPassCreateInfo2KHR& operator=( VkRenderPassCreateInfo2KHR const & rhs ) + { + memcpy( this, &rhs, sizeof( RenderPassCreateInfo2KHR ) ); + return *this; + } + RenderPassCreateInfo2KHR& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + RenderPassCreateInfo2KHR& setFlags( RenderPassCreateFlags flags_ ) + { + flags = flags_; + return *this; + } + + RenderPassCreateInfo2KHR& setAttachmentCount( uint32_t attachmentCount_ ) + { + attachmentCount = attachmentCount_; + return *this; + } + + RenderPassCreateInfo2KHR& setPAttachments( const AttachmentDescription2KHR* pAttachments_ ) + { + pAttachments = pAttachments_; + return *this; + } + + RenderPassCreateInfo2KHR& setSubpassCount( uint32_t subpassCount_ ) + { + subpassCount = subpassCount_; + return *this; + } + + RenderPassCreateInfo2KHR& setPSubpasses( const SubpassDescription2KHR* pSubpasses_ ) + { + pSubpasses = pSubpasses_; + return *this; + } + + RenderPassCreateInfo2KHR& setDependencyCount( uint32_t dependencyCount_ ) + { + dependencyCount = dependencyCount_; + return *this; + } + + RenderPassCreateInfo2KHR& setPDependencies( const SubpassDependency2KHR* pDependencies_ ) + { + pDependencies = pDependencies_; + return *this; + } + + RenderPassCreateInfo2KHR& setCorrelatedViewMaskCount( uint32_t correlatedViewMaskCount_ ) + { + correlatedViewMaskCount = correlatedViewMaskCount_; + return *this; + } + + RenderPassCreateInfo2KHR& setPCorrelatedViewMasks( const uint32_t* pCorrelatedViewMasks_ ) + { + pCorrelatedViewMasks = pCorrelatedViewMasks_; + return *this; + } + + operator VkRenderPassCreateInfo2KHR const&() const + { + return *reinterpret_cast(this); + } + + operator VkRenderPassCreateInfo2KHR &() + { + return *reinterpret_cast(this); + } + + bool operator==( RenderPassCreateInfo2KHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( attachmentCount == rhs.attachmentCount ) + && ( pAttachments == rhs.pAttachments ) + && ( subpassCount == rhs.subpassCount ) + && ( pSubpasses == rhs.pSubpasses ) + && ( dependencyCount == rhs.dependencyCount ) + && ( pDependencies == rhs.pDependencies ) + && ( correlatedViewMaskCount == rhs.correlatedViewMaskCount ) + && ( pCorrelatedViewMasks == rhs.pCorrelatedViewMasks ); + } + + bool operator!=( RenderPassCreateInfo2KHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eRenderPassCreateInfo2KHR; + + public: + const void* pNext = nullptr; + RenderPassCreateFlags flags; + uint32_t 
attachmentCount; + const AttachmentDescription2KHR* pAttachments; + uint32_t subpassCount; + const SubpassDescription2KHR* pSubpasses; + uint32_t dependencyCount; + const SubpassDependency2KHR* pDependencies; + uint32_t correlatedViewMaskCount; + const uint32_t* pCorrelatedViewMasks; + }; + static_assert( sizeof( RenderPassCreateInfo2KHR ) == sizeof( VkRenderPassCreateInfo2KHR ), "struct and wrapper have different size!" ); + + enum class PointClippingBehavior + { + eAllClipPlanes = VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES, + eAllClipPlanesKHR = VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES, + eUserClipPlanesOnly = VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY, + eUserClipPlanesOnlyKHR = VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY + }; + + struct PhysicalDevicePointClippingProperties + { + operator VkPhysicalDevicePointClippingProperties const&() const + { + return *reinterpret_cast(this); + } + + operator VkPhysicalDevicePointClippingProperties &() + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDevicePointClippingProperties const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( pointClippingBehavior == rhs.pointClippingBehavior ); + } + + bool operator!=( PhysicalDevicePointClippingProperties const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePhysicalDevicePointClippingProperties; + + public: + void* pNext = nullptr; + PointClippingBehavior pointClippingBehavior; + }; + static_assert( sizeof( PhysicalDevicePointClippingProperties ) == sizeof( VkPhysicalDevicePointClippingProperties ), "struct and wrapper have different size!" ); + + using PhysicalDevicePointClippingPropertiesKHR = PhysicalDevicePointClippingProperties; + + enum class SamplerReductionModeEXT + { + eWeightedAverage = VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE_EXT, + eMin = VK_SAMPLER_REDUCTION_MODE_MIN_EXT, + eMax = VK_SAMPLER_REDUCTION_MODE_MAX_EXT + }; + + struct SamplerReductionModeCreateInfoEXT + { + SamplerReductionModeCreateInfoEXT( SamplerReductionModeEXT reductionMode_ = SamplerReductionModeEXT::eWeightedAverage ) + : reductionMode( reductionMode_ ) + { + } + + SamplerReductionModeCreateInfoEXT( VkSamplerReductionModeCreateInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( SamplerReductionModeCreateInfoEXT ) ); + } + + SamplerReductionModeCreateInfoEXT& operator=( VkSamplerReductionModeCreateInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( SamplerReductionModeCreateInfoEXT ) ); + return *this; + } + SamplerReductionModeCreateInfoEXT& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + SamplerReductionModeCreateInfoEXT& setReductionMode( SamplerReductionModeEXT reductionMode_ ) + { + reductionMode = reductionMode_; + return *this; + } + + operator VkSamplerReductionModeCreateInfoEXT const&() const + { + return *reinterpret_cast(this); + } + + operator VkSamplerReductionModeCreateInfoEXT &() + { + return *reinterpret_cast(this); + } + + bool operator==( SamplerReductionModeCreateInfoEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( reductionMode == rhs.reductionMode ); + } + + bool operator!=( SamplerReductionModeCreateInfoEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eSamplerReductionModeCreateInfoEXT; + + public: + const void* pNext = nullptr; + SamplerReductionModeEXT reductionMode; + }; + static_assert( sizeof( SamplerReductionModeCreateInfoEXT ) 
== sizeof( VkSamplerReductionModeCreateInfoEXT ), "struct and wrapper have different size!" );
+
+  enum class TessellationDomainOrigin
+  {
+    eUpperLeft = VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT,
+    eUpperLeftKHR = VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT,
+    eLowerLeft = VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT,
+    eLowerLeftKHR = VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT
+  };
+
+  struct PipelineTessellationDomainOriginStateCreateInfo
+  {
+    PipelineTessellationDomainOriginStateCreateInfo( TessellationDomainOrigin domainOrigin_ = TessellationDomainOrigin::eUpperLeft )
+      : domainOrigin( domainOrigin_ )
+    {
+    }
+
+    PipelineTessellationDomainOriginStateCreateInfo( VkPipelineTessellationDomainOriginStateCreateInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( PipelineTessellationDomainOriginStateCreateInfo ) );
+    }
+
+    PipelineTessellationDomainOriginStateCreateInfo& operator=( VkPipelineTessellationDomainOriginStateCreateInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( PipelineTessellationDomainOriginStateCreateInfo ) );
+      return *this;
+    }
+    PipelineTessellationDomainOriginStateCreateInfo& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineTessellationDomainOriginStateCreateInfo& setDomainOrigin( TessellationDomainOrigin domainOrigin_ )
+    {
+      domainOrigin = domainOrigin_;
+      return *this;
+    }
+
+    operator VkPipelineTessellationDomainOriginStateCreateInfo const&() const
+    {
+      return *reinterpret_cast<const VkPipelineTessellationDomainOriginStateCreateInfo*>(this);
+    }
+
+    operator VkPipelineTessellationDomainOriginStateCreateInfo &()
+    {
+      return *reinterpret_cast<VkPipelineTessellationDomainOriginStateCreateInfo*>(this);
+    }
+
+    bool operator==( PipelineTessellationDomainOriginStateCreateInfo const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( domainOrigin == rhs.domainOrigin );
+    }
+
+    bool operator!=( PipelineTessellationDomainOriginStateCreateInfo const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType = StructureType::ePipelineTessellationDomainOriginStateCreateInfo;
+
+  public:
+    const void* pNext = nullptr;
+    TessellationDomainOrigin domainOrigin;
+  };
+  static_assert( sizeof( PipelineTessellationDomainOriginStateCreateInfo ) == sizeof( VkPipelineTessellationDomainOriginStateCreateInfo ), "struct and wrapper have different size!"
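+  // NOTE (illustrative, not part of the upstream header): the domain-origin struct
+  // above extends PipelineTessellationStateCreateInfo via its pNext chain:
+  //
+  //   vk::PipelineTessellationDomainOriginStateCreateInfo domainOrigin(
+  //       vk::TessellationDomainOrigin::eLowerLeft );
+  //   vk::PipelineTessellationStateCreateInfo tessState;
+  //   tessState.setPatchControlPoints( 3 )
+  //            .setPNext( &domainOrigin );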
); + + using PipelineTessellationDomainOriginStateCreateInfoKHR = PipelineTessellationDomainOriginStateCreateInfo; + + enum class SamplerYcbcrModelConversion + { + eRgbIdentity = VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY, + eRgbIdentityKHR = VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY, + eYcbcrIdentity = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_IDENTITY, + eYcbcrIdentityKHR = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_IDENTITY, + eYcbcr709 = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709, + eYcbcr709KHR = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709, + eYcbcr601 = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601, + eYcbcr601KHR = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601, + eYcbcr2020 = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020, + eYcbcr2020KHR = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020 + }; + + enum class SamplerYcbcrRange + { + eItuFull = VK_SAMPLER_YCBCR_RANGE_ITU_FULL, + eItuFullKHR = VK_SAMPLER_YCBCR_RANGE_ITU_FULL, + eItuNarrow = VK_SAMPLER_YCBCR_RANGE_ITU_NARROW, + eItuNarrowKHR = VK_SAMPLER_YCBCR_RANGE_ITU_NARROW + }; + + enum class ChromaLocation + { + eCositedEven = VK_CHROMA_LOCATION_COSITED_EVEN, + eCositedEvenKHR = VK_CHROMA_LOCATION_COSITED_EVEN, + eMidpoint = VK_CHROMA_LOCATION_MIDPOINT, + eMidpointKHR = VK_CHROMA_LOCATION_MIDPOINT + }; + + struct SamplerYcbcrConversionCreateInfo + { + SamplerYcbcrConversionCreateInfo( Format format_ = Format::eUndefined, + SamplerYcbcrModelConversion ycbcrModel_ = SamplerYcbcrModelConversion::eRgbIdentity, + SamplerYcbcrRange ycbcrRange_ = SamplerYcbcrRange::eItuFull, + ComponentMapping components_ = ComponentMapping(), + ChromaLocation xChromaOffset_ = ChromaLocation::eCositedEven, + ChromaLocation yChromaOffset_ = ChromaLocation::eCositedEven, + Filter chromaFilter_ = Filter::eNearest, + Bool32 forceExplicitReconstruction_ = 0 ) + : format( format_ ) + , ycbcrModel( ycbcrModel_ ) + , ycbcrRange( ycbcrRange_ ) + , components( components_ ) + , xChromaOffset( xChromaOffset_ ) + , yChromaOffset( yChromaOffset_ ) + , chromaFilter( chromaFilter_ ) + , forceExplicitReconstruction( forceExplicitReconstruction_ ) + { + } + + SamplerYcbcrConversionCreateInfo( VkSamplerYcbcrConversionCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( SamplerYcbcrConversionCreateInfo ) ); + } + + SamplerYcbcrConversionCreateInfo& operator=( VkSamplerYcbcrConversionCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( SamplerYcbcrConversionCreateInfo ) ); + return *this; + } + SamplerYcbcrConversionCreateInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + SamplerYcbcrConversionCreateInfo& setFormat( Format format_ ) + { + format = format_; + return *this; + } + + SamplerYcbcrConversionCreateInfo& setYcbcrModel( SamplerYcbcrModelConversion ycbcrModel_ ) + { + ycbcrModel = ycbcrModel_; + return *this; + } + + SamplerYcbcrConversionCreateInfo& setYcbcrRange( SamplerYcbcrRange ycbcrRange_ ) + { + ycbcrRange = ycbcrRange_; + return *this; + } + + SamplerYcbcrConversionCreateInfo& setComponents( ComponentMapping components_ ) + { + components = components_; + return *this; + } + + SamplerYcbcrConversionCreateInfo& setXChromaOffset( ChromaLocation xChromaOffset_ ) + { + xChromaOffset = xChromaOffset_; + return *this; + } + + SamplerYcbcrConversionCreateInfo& setYChromaOffset( ChromaLocation yChromaOffset_ ) + { + yChromaOffset = yChromaOffset_; + return *this; + } + + SamplerYcbcrConversionCreateInfo& setChromaFilter( Filter chromaFilter_ ) + { + chromaFilter = chromaFilter_; + return *this; + } + + 
SamplerYcbcrConversionCreateInfo& setForceExplicitReconstruction( Bool32 forceExplicitReconstruction_ ) + { + forceExplicitReconstruction = forceExplicitReconstruction_; + return *this; + } + + operator VkSamplerYcbcrConversionCreateInfo const&() const + { + return *reinterpret_cast(this); + } + + operator VkSamplerYcbcrConversionCreateInfo &() + { + return *reinterpret_cast(this); + } + + bool operator==( SamplerYcbcrConversionCreateInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( format == rhs.format ) + && ( ycbcrModel == rhs.ycbcrModel ) + && ( ycbcrRange == rhs.ycbcrRange ) + && ( components == rhs.components ) + && ( xChromaOffset == rhs.xChromaOffset ) + && ( yChromaOffset == rhs.yChromaOffset ) + && ( chromaFilter == rhs.chromaFilter ) + && ( forceExplicitReconstruction == rhs.forceExplicitReconstruction ); + } + + bool operator!=( SamplerYcbcrConversionCreateInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eSamplerYcbcrConversionCreateInfo; + + public: + const void* pNext = nullptr; + Format format; + SamplerYcbcrModelConversion ycbcrModel; + SamplerYcbcrRange ycbcrRange; + ComponentMapping components; + ChromaLocation xChromaOffset; + ChromaLocation yChromaOffset; + Filter chromaFilter; + Bool32 forceExplicitReconstruction; + }; + static_assert( sizeof( SamplerYcbcrConversionCreateInfo ) == sizeof( VkSamplerYcbcrConversionCreateInfo ), "struct and wrapper have different size!" ); + + using SamplerYcbcrConversionCreateInfoKHR = SamplerYcbcrConversionCreateInfo; + +#ifdef VK_USE_PLATFORM_ANDROID_ANDROID + struct AndroidHardwareBufferFormatPropertiesANDROID + { + operator VkAndroidHardwareBufferFormatPropertiesANDROID const&() const + { + return *reinterpret_cast(this); + } + + operator VkAndroidHardwareBufferFormatPropertiesANDROID &() + { + return *reinterpret_cast(this); + } + + bool operator==( AndroidHardwareBufferFormatPropertiesANDROID const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( format == rhs.format ) + && ( externalFormat == rhs.externalFormat ) + && ( formatFeatures == rhs.formatFeatures ) + && ( samplerYcbcrConversionComponents == rhs.samplerYcbcrConversionComponents ) + && ( suggestedYcbcrModel == rhs.suggestedYcbcrModel ) + && ( suggestedYcbcrRange == rhs.suggestedYcbcrRange ) + && ( suggestedXChromaOffset == rhs.suggestedXChromaOffset ) + && ( suggestedYChromaOffset == rhs.suggestedYChromaOffset ); + } + + bool operator!=( AndroidHardwareBufferFormatPropertiesANDROID const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eAndroidHardwareBufferFormatPropertiesANDROID; + + public: + void* pNext = nullptr; + Format format; + uint64_t externalFormat; + FormatFeatureFlags formatFeatures; + ComponentMapping samplerYcbcrConversionComponents; + SamplerYcbcrModelConversion suggestedYcbcrModel; + SamplerYcbcrRange suggestedYcbcrRange; + ChromaLocation suggestedXChromaOffset; + ChromaLocation suggestedYChromaOffset; + }; + static_assert( sizeof( AndroidHardwareBufferFormatPropertiesANDROID ) == sizeof( VkAndroidHardwareBufferFormatPropertiesANDROID ), "struct and wrapper have different size!" 
); +#endif /*VK_USE_PLATFORM_ANDROID_ANDROID*/ + + enum class BlendOverlapEXT + { + eUncorrelated = VK_BLEND_OVERLAP_UNCORRELATED_EXT, + eDisjoint = VK_BLEND_OVERLAP_DISJOINT_EXT, + eConjoint = VK_BLEND_OVERLAP_CONJOINT_EXT + }; + + struct PipelineColorBlendAdvancedStateCreateInfoEXT + { + PipelineColorBlendAdvancedStateCreateInfoEXT( Bool32 srcPremultiplied_ = 0, + Bool32 dstPremultiplied_ = 0, + BlendOverlapEXT blendOverlap_ = BlendOverlapEXT::eUncorrelated ) + : srcPremultiplied( srcPremultiplied_ ) + , dstPremultiplied( dstPremultiplied_ ) + , blendOverlap( blendOverlap_ ) + { + } + + PipelineColorBlendAdvancedStateCreateInfoEXT( VkPipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineColorBlendAdvancedStateCreateInfoEXT ) ); + } + + PipelineColorBlendAdvancedStateCreateInfoEXT& operator=( VkPipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineColorBlendAdvancedStateCreateInfoEXT ) ); + return *this; + } + PipelineColorBlendAdvancedStateCreateInfoEXT& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PipelineColorBlendAdvancedStateCreateInfoEXT& setSrcPremultiplied( Bool32 srcPremultiplied_ ) + { + srcPremultiplied = srcPremultiplied_; + return *this; + } + + PipelineColorBlendAdvancedStateCreateInfoEXT& setDstPremultiplied( Bool32 dstPremultiplied_ ) + { + dstPremultiplied = dstPremultiplied_; + return *this; + } + + PipelineColorBlendAdvancedStateCreateInfoEXT& setBlendOverlap( BlendOverlapEXT blendOverlap_ ) + { + blendOverlap = blendOverlap_; + return *this; + } + + operator VkPipelineColorBlendAdvancedStateCreateInfoEXT const&() const + { + return *reinterpret_cast(this); + } + + operator VkPipelineColorBlendAdvancedStateCreateInfoEXT &() + { + return *reinterpret_cast(this); + } + + bool operator==( PipelineColorBlendAdvancedStateCreateInfoEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( srcPremultiplied == rhs.srcPremultiplied ) + && ( dstPremultiplied == rhs.dstPremultiplied ) + && ( blendOverlap == rhs.blendOverlap ); + } + + bool operator!=( PipelineColorBlendAdvancedStateCreateInfoEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePipelineColorBlendAdvancedStateCreateInfoEXT; + + public: + const void* pNext = nullptr; + Bool32 srcPremultiplied; + Bool32 dstPremultiplied; + BlendOverlapEXT blendOverlap; + }; + static_assert( sizeof( PipelineColorBlendAdvancedStateCreateInfoEXT ) == sizeof( VkPipelineColorBlendAdvancedStateCreateInfoEXT ), "struct and wrapper have different size!" 
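+  // NOTE (illustrative, not part of the upstream header): the advanced-blend struct
+  // above (VK_EXT_blend_operation_advanced) hangs off
+  // PipelineColorBlendStateCreateInfo::pNext; `colorBlendStateCreateInfo` is an
+  // assumed vk::PipelineColorBlendStateCreateInfo built elsewhere:
+  //
+  //   vk::PipelineColorBlendAdvancedStateCreateInfoEXT advancedBlend;
+  //   advancedBlend.setSrcPremultiplied( VK_TRUE )
+  //                .setDstPremultiplied( VK_TRUE )
+  //                .setBlendOverlap( vk::BlendOverlapEXT::eUncorrelated );
+  //   colorBlendStateCreateInfo.setPNext( &advancedBlend );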
); + + enum class CoverageModulationModeNV + { + eNone = VK_COVERAGE_MODULATION_MODE_NONE_NV, + eRgb = VK_COVERAGE_MODULATION_MODE_RGB_NV, + eAlpha = VK_COVERAGE_MODULATION_MODE_ALPHA_NV, + eRgba = VK_COVERAGE_MODULATION_MODE_RGBA_NV + }; + + struct PipelineCoverageModulationStateCreateInfoNV + { + PipelineCoverageModulationStateCreateInfoNV( PipelineCoverageModulationStateCreateFlagsNV flags_ = PipelineCoverageModulationStateCreateFlagsNV(), + CoverageModulationModeNV coverageModulationMode_ = CoverageModulationModeNV::eNone, + Bool32 coverageModulationTableEnable_ = 0, + uint32_t coverageModulationTableCount_ = 0, + const float* pCoverageModulationTable_ = nullptr ) + : flags( flags_ ) + , coverageModulationMode( coverageModulationMode_ ) + , coverageModulationTableEnable( coverageModulationTableEnable_ ) + , coverageModulationTableCount( coverageModulationTableCount_ ) + , pCoverageModulationTable( pCoverageModulationTable_ ) + { + } + + PipelineCoverageModulationStateCreateInfoNV( VkPipelineCoverageModulationStateCreateInfoNV const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineCoverageModulationStateCreateInfoNV ) ); + } + + PipelineCoverageModulationStateCreateInfoNV& operator=( VkPipelineCoverageModulationStateCreateInfoNV const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineCoverageModulationStateCreateInfoNV ) ); + return *this; + } + PipelineCoverageModulationStateCreateInfoNV& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PipelineCoverageModulationStateCreateInfoNV& setFlags( PipelineCoverageModulationStateCreateFlagsNV flags_ ) + { + flags = flags_; + return *this; + } + + PipelineCoverageModulationStateCreateInfoNV& setCoverageModulationMode( CoverageModulationModeNV coverageModulationMode_ ) + { + coverageModulationMode = coverageModulationMode_; + return *this; + } + + PipelineCoverageModulationStateCreateInfoNV& setCoverageModulationTableEnable( Bool32 coverageModulationTableEnable_ ) + { + coverageModulationTableEnable = coverageModulationTableEnable_; + return *this; + } + + PipelineCoverageModulationStateCreateInfoNV& setCoverageModulationTableCount( uint32_t coverageModulationTableCount_ ) + { + coverageModulationTableCount = coverageModulationTableCount_; + return *this; + } + + PipelineCoverageModulationStateCreateInfoNV& setPCoverageModulationTable( const float* pCoverageModulationTable_ ) + { + pCoverageModulationTable = pCoverageModulationTable_; + return *this; + } + + operator VkPipelineCoverageModulationStateCreateInfoNV const&() const + { + return *reinterpret_cast(this); + } + + operator VkPipelineCoverageModulationStateCreateInfoNV &() + { + return *reinterpret_cast(this); + } + + bool operator==( PipelineCoverageModulationStateCreateInfoNV const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( coverageModulationMode == rhs.coverageModulationMode ) + && ( coverageModulationTableEnable == rhs.coverageModulationTableEnable ) + && ( coverageModulationTableCount == rhs.coverageModulationTableCount ) + && ( pCoverageModulationTable == rhs.pCoverageModulationTable ); + } + + bool operator!=( PipelineCoverageModulationStateCreateInfoNV const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePipelineCoverageModulationStateCreateInfoNV; + + public: + const void* pNext = nullptr; + PipelineCoverageModulationStateCreateFlagsNV flags; + CoverageModulationModeNV coverageModulationMode; + Bool32 
coverageModulationTableEnable; + uint32_t coverageModulationTableCount; + const float* pCoverageModulationTable; + }; + static_assert( sizeof( PipelineCoverageModulationStateCreateInfoNV ) == sizeof( VkPipelineCoverageModulationStateCreateInfoNV ), "struct and wrapper have different size!" ); + + enum class ValidationCacheHeaderVersionEXT + { + eOne = VK_VALIDATION_CACHE_HEADER_VERSION_ONE_EXT + }; + + enum class ShaderInfoTypeAMD + { + eStatistics = VK_SHADER_INFO_TYPE_STATISTICS_AMD, + eBinary = VK_SHADER_INFO_TYPE_BINARY_AMD, + eDisassembly = VK_SHADER_INFO_TYPE_DISASSEMBLY_AMD + }; + + enum class QueueGlobalPriorityEXT + { + eLow = VK_QUEUE_GLOBAL_PRIORITY_LOW_EXT, + eMedium = VK_QUEUE_GLOBAL_PRIORITY_MEDIUM_EXT, + eHigh = VK_QUEUE_GLOBAL_PRIORITY_HIGH_EXT, + eRealtime = VK_QUEUE_GLOBAL_PRIORITY_REALTIME_EXT + }; + + struct DeviceQueueGlobalPriorityCreateInfoEXT + { + DeviceQueueGlobalPriorityCreateInfoEXT( QueueGlobalPriorityEXT globalPriority_ = QueueGlobalPriorityEXT::eLow ) + : globalPriority( globalPriority_ ) + { + } + + DeviceQueueGlobalPriorityCreateInfoEXT( VkDeviceQueueGlobalPriorityCreateInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( DeviceQueueGlobalPriorityCreateInfoEXT ) ); + } + + DeviceQueueGlobalPriorityCreateInfoEXT& operator=( VkDeviceQueueGlobalPriorityCreateInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( DeviceQueueGlobalPriorityCreateInfoEXT ) ); + return *this; + } + DeviceQueueGlobalPriorityCreateInfoEXT& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + DeviceQueueGlobalPriorityCreateInfoEXT& setGlobalPriority( QueueGlobalPriorityEXT globalPriority_ ) + { + globalPriority = globalPriority_; + return *this; + } + + operator VkDeviceQueueGlobalPriorityCreateInfoEXT const&() const + { + return *reinterpret_cast(this); + } + + operator VkDeviceQueueGlobalPriorityCreateInfoEXT &() + { + return *reinterpret_cast(this); + } + + bool operator==( DeviceQueueGlobalPriorityCreateInfoEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( globalPriority == rhs.globalPriority ); + } + + bool operator!=( DeviceQueueGlobalPriorityCreateInfoEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eDeviceQueueGlobalPriorityCreateInfoEXT; + + public: + const void* pNext = nullptr; + QueueGlobalPriorityEXT globalPriority; + }; + static_assert( sizeof( DeviceQueueGlobalPriorityCreateInfoEXT ) == sizeof( VkDeviceQueueGlobalPriorityCreateInfoEXT ), "struct and wrapper have different size!" 
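+  // NOTE (illustrative, not part of the upstream header): DeviceQueueGlobalPriorityCreateInfoEXT
+  // (VK_EXT_global_priority) is chained into DeviceQueueCreateInfo::pNext when the
+  // queue is created; `graphicsQueueFamily` is an assumed queue-family index found
+  // elsewhere:
+  //
+  //   vk::DeviceQueueGlobalPriorityCreateInfoEXT priority( vk::QueueGlobalPriorityEXT::eHigh );
+  //   float queuePriority = 1.0f;
+  //   vk::DeviceQueueCreateInfo queueInfo;
+  //   queueInfo.setQueueFamilyIndex( graphicsQueueFamily )
+  //            .setQueueCount( 1 )
+  //            .setPQueuePriorities( &queuePriority )
+  //            .setPNext( &priority );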
); + + enum class DebugUtilsMessageSeverityFlagBitsEXT + { + eVerbose = VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT, + eInfo = VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT, + eWarning = VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT, + eError = VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT + }; + + using DebugUtilsMessageSeverityFlagsEXT = Flags; + + VULKAN_HPP_INLINE DebugUtilsMessageSeverityFlagsEXT operator|( DebugUtilsMessageSeverityFlagBitsEXT bit0, DebugUtilsMessageSeverityFlagBitsEXT bit1 ) + { + return DebugUtilsMessageSeverityFlagsEXT( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE DebugUtilsMessageSeverityFlagsEXT operator~( DebugUtilsMessageSeverityFlagBitsEXT bits ) + { + return ~( DebugUtilsMessageSeverityFlagsEXT( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(DebugUtilsMessageSeverityFlagBitsEXT::eVerbose) | VkFlags(DebugUtilsMessageSeverityFlagBitsEXT::eInfo) | VkFlags(DebugUtilsMessageSeverityFlagBitsEXT::eWarning) | VkFlags(DebugUtilsMessageSeverityFlagBitsEXT::eError) + }; + }; + + enum class DebugUtilsMessageTypeFlagBitsEXT + { + eGeneral = VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT, + eValidation = VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT, + ePerformance = VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT + }; + + using DebugUtilsMessageTypeFlagsEXT = Flags; + + VULKAN_HPP_INLINE DebugUtilsMessageTypeFlagsEXT operator|( DebugUtilsMessageTypeFlagBitsEXT bit0, DebugUtilsMessageTypeFlagBitsEXT bit1 ) + { + return DebugUtilsMessageTypeFlagsEXT( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE DebugUtilsMessageTypeFlagsEXT operator~( DebugUtilsMessageTypeFlagBitsEXT bits ) + { + return ~( DebugUtilsMessageTypeFlagsEXT( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(DebugUtilsMessageTypeFlagBitsEXT::eGeneral) | VkFlags(DebugUtilsMessageTypeFlagBitsEXT::eValidation) | VkFlags(DebugUtilsMessageTypeFlagBitsEXT::ePerformance) + }; + }; + + struct DebugUtilsMessengerCreateInfoEXT + { + DebugUtilsMessengerCreateInfoEXT( DebugUtilsMessengerCreateFlagsEXT flags_ = DebugUtilsMessengerCreateFlagsEXT(), + DebugUtilsMessageSeverityFlagsEXT messageSeverity_ = DebugUtilsMessageSeverityFlagsEXT(), + DebugUtilsMessageTypeFlagsEXT messageType_ = DebugUtilsMessageTypeFlagsEXT(), + PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback_ = nullptr, + void* pUserData_ = nullptr ) + : flags( flags_ ) + , messageSeverity( messageSeverity_ ) + , messageType( messageType_ ) + , pfnUserCallback( pfnUserCallback_ ) + , pUserData( pUserData_ ) + { + } + + DebugUtilsMessengerCreateInfoEXT( VkDebugUtilsMessengerCreateInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( DebugUtilsMessengerCreateInfoEXT ) ); + } + + DebugUtilsMessengerCreateInfoEXT& operator=( VkDebugUtilsMessengerCreateInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( DebugUtilsMessengerCreateInfoEXT ) ); + return *this; + } + DebugUtilsMessengerCreateInfoEXT& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + DebugUtilsMessengerCreateInfoEXT& setFlags( DebugUtilsMessengerCreateFlagsEXT flags_ ) + { + flags = flags_; + return *this; + } + + DebugUtilsMessengerCreateInfoEXT& setMessageSeverity( DebugUtilsMessageSeverityFlagsEXT messageSeverity_ ) + { + messageSeverity = messageSeverity_; + return *this; + } + + DebugUtilsMessengerCreateInfoEXT& setMessageType( DebugUtilsMessageTypeFlagsEXT messageType_ ) + { + messageType = messageType_; + return *this; + } + + DebugUtilsMessengerCreateInfoEXT& setPfnUserCallback( 
PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback_ ) + { + pfnUserCallback = pfnUserCallback_; + return *this; + } + + DebugUtilsMessengerCreateInfoEXT& setPUserData( void* pUserData_ ) + { + pUserData = pUserData_; + return *this; + } + + operator VkDebugUtilsMessengerCreateInfoEXT const&() const + { + return *reinterpret_cast(this); + } + + operator VkDebugUtilsMessengerCreateInfoEXT &() + { + return *reinterpret_cast(this); + } + + bool operator==( DebugUtilsMessengerCreateInfoEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( messageSeverity == rhs.messageSeverity ) + && ( messageType == rhs.messageType ) + && ( pfnUserCallback == rhs.pfnUserCallback ) + && ( pUserData == rhs.pUserData ); + } + + bool operator!=( DebugUtilsMessengerCreateInfoEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eDebugUtilsMessengerCreateInfoEXT; + + public: + const void* pNext = nullptr; + DebugUtilsMessengerCreateFlagsEXT flags; + DebugUtilsMessageSeverityFlagsEXT messageSeverity; + DebugUtilsMessageTypeFlagsEXT messageType; + PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback; + void* pUserData; + }; + static_assert( sizeof( DebugUtilsMessengerCreateInfoEXT ) == sizeof( VkDebugUtilsMessengerCreateInfoEXT ), "struct and wrapper have different size!" ); + + enum class ConservativeRasterizationModeEXT + { + eDisabled = VK_CONSERVATIVE_RASTERIZATION_MODE_DISABLED_EXT, + eOverestimate = VK_CONSERVATIVE_RASTERIZATION_MODE_OVERESTIMATE_EXT, + eUnderestimate = VK_CONSERVATIVE_RASTERIZATION_MODE_UNDERESTIMATE_EXT + }; + + struct PipelineRasterizationConservativeStateCreateInfoEXT + { + PipelineRasterizationConservativeStateCreateInfoEXT( PipelineRasterizationConservativeStateCreateFlagsEXT flags_ = PipelineRasterizationConservativeStateCreateFlagsEXT(), + ConservativeRasterizationModeEXT conservativeRasterizationMode_ = ConservativeRasterizationModeEXT::eDisabled, + float extraPrimitiveOverestimationSize_ = 0 ) + : flags( flags_ ) + , conservativeRasterizationMode( conservativeRasterizationMode_ ) + , extraPrimitiveOverestimationSize( extraPrimitiveOverestimationSize_ ) + { + } + + PipelineRasterizationConservativeStateCreateInfoEXT( VkPipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineRasterizationConservativeStateCreateInfoEXT ) ); + } + + PipelineRasterizationConservativeStateCreateInfoEXT& operator=( VkPipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineRasterizationConservativeStateCreateInfoEXT ) ); + return *this; + } + PipelineRasterizationConservativeStateCreateInfoEXT& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PipelineRasterizationConservativeStateCreateInfoEXT& setFlags( PipelineRasterizationConservativeStateCreateFlagsEXT flags_ ) + { + flags = flags_; + return *this; + } + + PipelineRasterizationConservativeStateCreateInfoEXT& setConservativeRasterizationMode( ConservativeRasterizationModeEXT conservativeRasterizationMode_ ) + { + conservativeRasterizationMode = conservativeRasterizationMode_; + return *this; + } + + PipelineRasterizationConservativeStateCreateInfoEXT& setExtraPrimitiveOverestimationSize( float extraPrimitiveOverestimationSize_ ) + { + extraPrimitiveOverestimationSize = extraPrimitiveOverestimationSize_; + return *this; + } + + operator VkPipelineRasterizationConservativeStateCreateInfoEXT const&() 
const + { + return *reinterpret_cast(this); + } + + operator VkPipelineRasterizationConservativeStateCreateInfoEXT &() + { + return *reinterpret_cast(this); + } + + bool operator==( PipelineRasterizationConservativeStateCreateInfoEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( conservativeRasterizationMode == rhs.conservativeRasterizationMode ) + && ( extraPrimitiveOverestimationSize == rhs.extraPrimitiveOverestimationSize ); + } + + bool operator!=( PipelineRasterizationConservativeStateCreateInfoEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePipelineRasterizationConservativeStateCreateInfoEXT; + + public: + const void* pNext = nullptr; + PipelineRasterizationConservativeStateCreateFlagsEXT flags; + ConservativeRasterizationModeEXT conservativeRasterizationMode; + float extraPrimitiveOverestimationSize; + }; + static_assert( sizeof( PipelineRasterizationConservativeStateCreateInfoEXT ) == sizeof( VkPipelineRasterizationConservativeStateCreateInfoEXT ), "struct and wrapper have different size!" ); + + enum class DescriptorBindingFlagBitsEXT + { + eUpdateAfterBind = VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT, + eUpdateUnusedWhilePending = VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT_EXT, + ePartiallyBound = VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT_EXT, + eVariableDescriptorCount = VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT_EXT + }; + + using DescriptorBindingFlagsEXT = Flags; + + VULKAN_HPP_INLINE DescriptorBindingFlagsEXT operator|( DescriptorBindingFlagBitsEXT bit0, DescriptorBindingFlagBitsEXT bit1 ) + { + return DescriptorBindingFlagsEXT( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE DescriptorBindingFlagsEXT operator~( DescriptorBindingFlagBitsEXT bits ) + { + return ~( DescriptorBindingFlagsEXT( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(DescriptorBindingFlagBitsEXT::eUpdateAfterBind) | VkFlags(DescriptorBindingFlagBitsEXT::eUpdateUnusedWhilePending) | VkFlags(DescriptorBindingFlagBitsEXT::ePartiallyBound) | VkFlags(DescriptorBindingFlagBitsEXT::eVariableDescriptorCount) + }; + }; + + struct DescriptorSetLayoutBindingFlagsCreateInfoEXT + { + DescriptorSetLayoutBindingFlagsCreateInfoEXT( uint32_t bindingCount_ = 0, + const DescriptorBindingFlagsEXT* pBindingFlags_ = nullptr ) + : bindingCount( bindingCount_ ) + , pBindingFlags( pBindingFlags_ ) + { + } + + DescriptorSetLayoutBindingFlagsCreateInfoEXT( VkDescriptorSetLayoutBindingFlagsCreateInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( DescriptorSetLayoutBindingFlagsCreateInfoEXT ) ); + } + + DescriptorSetLayoutBindingFlagsCreateInfoEXT& operator=( VkDescriptorSetLayoutBindingFlagsCreateInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( DescriptorSetLayoutBindingFlagsCreateInfoEXT ) ); + return *this; + } + DescriptorSetLayoutBindingFlagsCreateInfoEXT& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + DescriptorSetLayoutBindingFlagsCreateInfoEXT& setBindingCount( uint32_t bindingCount_ ) + { + bindingCount = bindingCount_; + return *this; + } + + DescriptorSetLayoutBindingFlagsCreateInfoEXT& setPBindingFlags( const DescriptorBindingFlagsEXT* pBindingFlags_ ) + { + pBindingFlags = pBindingFlags_; + return *this; + } + + operator VkDescriptorSetLayoutBindingFlagsCreateInfoEXT const&() const + { + return *reinterpret_cast(this); + } + + operator VkDescriptorSetLayoutBindingFlagsCreateInfoEXT &() + { + 
return *reinterpret_cast(this); + } + + bool operator==( DescriptorSetLayoutBindingFlagsCreateInfoEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( bindingCount == rhs.bindingCount ) + && ( pBindingFlags == rhs.pBindingFlags ); + } + + bool operator!=( DescriptorSetLayoutBindingFlagsCreateInfoEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eDescriptorSetLayoutBindingFlagsCreateInfoEXT; + + public: + const void* pNext = nullptr; + uint32_t bindingCount; + const DescriptorBindingFlagsEXT* pBindingFlags; + }; + static_assert( sizeof( DescriptorSetLayoutBindingFlagsCreateInfoEXT ) == sizeof( VkDescriptorSetLayoutBindingFlagsCreateInfoEXT ), "struct and wrapper have different size!" ); + + enum class VendorId + { + eViv = VK_VENDOR_ID_VIV, + eVsi = VK_VENDOR_ID_VSI, + eKazan = VK_VENDOR_ID_KAZAN + }; + + enum class DriverIdKHR + { + eAmdProprietary = VK_DRIVER_ID_AMD_PROPRIETARY_KHR, + eAmdOpenSource = VK_DRIVER_ID_AMD_OPEN_SOURCE_KHR, + eMesaRadv = VK_DRIVER_ID_MESA_RADV_KHR, + eNvidiaProprietary = VK_DRIVER_ID_NVIDIA_PROPRIETARY_KHR, + eIntelProprietaryWindows = VK_DRIVER_ID_INTEL_PROPRIETARY_WINDOWS_KHR, + eIntelOpenSourceMesa = VK_DRIVER_ID_INTEL_OPEN_SOURCE_MESA_KHR, + eImaginationProprietary = VK_DRIVER_ID_IMAGINATION_PROPRIETARY_KHR, + eQualcommProprietary = VK_DRIVER_ID_QUALCOMM_PROPRIETARY_KHR, + eArmProprietary = VK_DRIVER_ID_ARM_PROPRIETARY_KHR + }; + + struct PhysicalDeviceDriverPropertiesKHR + { + operator VkPhysicalDeviceDriverPropertiesKHR const&() const + { + return *reinterpret_cast(this); + } + + operator VkPhysicalDeviceDriverPropertiesKHR &() + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDeviceDriverPropertiesKHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( driverID == rhs.driverID ) + && ( memcmp( driverName, rhs.driverName, VK_MAX_DRIVER_NAME_SIZE_KHR * sizeof( char ) ) == 0 ) + && ( memcmp( driverInfo, rhs.driverInfo, VK_MAX_DRIVER_INFO_SIZE_KHR * sizeof( char ) ) == 0 ) + && ( conformanceVersion == rhs.conformanceVersion ); + } + + bool operator!=( PhysicalDeviceDriverPropertiesKHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePhysicalDeviceDriverPropertiesKHR; + + public: + void* pNext = nullptr; + DriverIdKHR driverID; + char driverName[VK_MAX_DRIVER_NAME_SIZE_KHR]; + char driverInfo[VK_MAX_DRIVER_INFO_SIZE_KHR]; + ConformanceVersionKHR conformanceVersion; + }; + static_assert( sizeof( PhysicalDeviceDriverPropertiesKHR ) == sizeof( VkPhysicalDeviceDriverPropertiesKHR ), "struct and wrapper have different size!" 
); + + enum class ConditionalRenderingFlagBitsEXT + { + eInverted = VK_CONDITIONAL_RENDERING_INVERTED_BIT_EXT + }; + + using ConditionalRenderingFlagsEXT = Flags; + + VULKAN_HPP_INLINE ConditionalRenderingFlagsEXT operator|( ConditionalRenderingFlagBitsEXT bit0, ConditionalRenderingFlagBitsEXT bit1 ) + { + return ConditionalRenderingFlagsEXT( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE ConditionalRenderingFlagsEXT operator~( ConditionalRenderingFlagBitsEXT bits ) + { + return ~( ConditionalRenderingFlagsEXT( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(ConditionalRenderingFlagBitsEXT::eInverted) + }; + }; + + struct ConditionalRenderingBeginInfoEXT + { + ConditionalRenderingBeginInfoEXT( Buffer buffer_ = Buffer(), + DeviceSize offset_ = 0, + ConditionalRenderingFlagsEXT flags_ = ConditionalRenderingFlagsEXT() ) + : buffer( buffer_ ) + , offset( offset_ ) + , flags( flags_ ) + { + } + + ConditionalRenderingBeginInfoEXT( VkConditionalRenderingBeginInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( ConditionalRenderingBeginInfoEXT ) ); + } + + ConditionalRenderingBeginInfoEXT& operator=( VkConditionalRenderingBeginInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( ConditionalRenderingBeginInfoEXT ) ); + return *this; + } + ConditionalRenderingBeginInfoEXT& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + ConditionalRenderingBeginInfoEXT& setBuffer( Buffer buffer_ ) + { + buffer = buffer_; + return *this; + } + + ConditionalRenderingBeginInfoEXT& setOffset( DeviceSize offset_ ) + { + offset = offset_; + return *this; + } + + ConditionalRenderingBeginInfoEXT& setFlags( ConditionalRenderingFlagsEXT flags_ ) + { + flags = flags_; + return *this; + } + + operator VkConditionalRenderingBeginInfoEXT const&() const + { + return *reinterpret_cast(this); + } + + operator VkConditionalRenderingBeginInfoEXT &() + { + return *reinterpret_cast(this); + } + + bool operator==( ConditionalRenderingBeginInfoEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( buffer == rhs.buffer ) + && ( offset == rhs.offset ) + && ( flags == rhs.flags ); + } + + bool operator!=( ConditionalRenderingBeginInfoEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eConditionalRenderingBeginInfoEXT; + + public: + const void* pNext = nullptr; + Buffer buffer; + DeviceSize offset; + ConditionalRenderingFlagsEXT flags; + }; + static_assert( sizeof( ConditionalRenderingBeginInfoEXT ) == sizeof( VkConditionalRenderingBeginInfoEXT ), "struct and wrapper have different size!" 
); + + enum class ShadingRatePaletteEntryNV + { + eNoInvocations = VK_SHADING_RATE_PALETTE_ENTRY_NO_INVOCATIONS_NV, + e16InvocationsPerPixel = VK_SHADING_RATE_PALETTE_ENTRY_16_INVOCATIONS_PER_PIXEL_NV, + e8InvocationsPerPixel = VK_SHADING_RATE_PALETTE_ENTRY_8_INVOCATIONS_PER_PIXEL_NV, + e4InvocationsPerPixel = VK_SHADING_RATE_PALETTE_ENTRY_4_INVOCATIONS_PER_PIXEL_NV, + e2InvocationsPerPixel = VK_SHADING_RATE_PALETTE_ENTRY_2_INVOCATIONS_PER_PIXEL_NV, + e1InvocationPerPixel = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_PIXEL_NV, + e1InvocationPer2X1Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X1_PIXELS_NV, + e1InvocationPer1X2Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_1X2_PIXELS_NV, + e1InvocationPer2X2Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X2_PIXELS_NV, + e1InvocationPer4X2Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X2_PIXELS_NV, + e1InvocationPer2X4Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X4_PIXELS_NV, + e1InvocationPer4X4Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X4_PIXELS_NV + }; + + struct ShadingRatePaletteNV + { + ShadingRatePaletteNV( uint32_t shadingRatePaletteEntryCount_ = 0, + const ShadingRatePaletteEntryNV* pShadingRatePaletteEntries_ = nullptr ) + : shadingRatePaletteEntryCount( shadingRatePaletteEntryCount_ ) + , pShadingRatePaletteEntries( pShadingRatePaletteEntries_ ) + { + } + + ShadingRatePaletteNV( VkShadingRatePaletteNV const & rhs ) + { + memcpy( this, &rhs, sizeof( ShadingRatePaletteNV ) ); + } + + ShadingRatePaletteNV& operator=( VkShadingRatePaletteNV const & rhs ) + { + memcpy( this, &rhs, sizeof( ShadingRatePaletteNV ) ); + return *this; + } + ShadingRatePaletteNV& setShadingRatePaletteEntryCount( uint32_t shadingRatePaletteEntryCount_ ) + { + shadingRatePaletteEntryCount = shadingRatePaletteEntryCount_; + return *this; + } + + ShadingRatePaletteNV& setPShadingRatePaletteEntries( const ShadingRatePaletteEntryNV* pShadingRatePaletteEntries_ ) + { + pShadingRatePaletteEntries = pShadingRatePaletteEntries_; + return *this; + } + + operator VkShadingRatePaletteNV const&() const + { + return *reinterpret_cast(this); + } + + operator VkShadingRatePaletteNV &() + { + return *reinterpret_cast(this); + } + + bool operator==( ShadingRatePaletteNV const& rhs ) const + { + return ( shadingRatePaletteEntryCount == rhs.shadingRatePaletteEntryCount ) + && ( pShadingRatePaletteEntries == rhs.pShadingRatePaletteEntries ); + } + + bool operator!=( ShadingRatePaletteNV const& rhs ) const + { + return !operator==( rhs ); + } + + uint32_t shadingRatePaletteEntryCount; + const ShadingRatePaletteEntryNV* pShadingRatePaletteEntries; + }; + static_assert( sizeof( ShadingRatePaletteNV ) == sizeof( VkShadingRatePaletteNV ), "struct and wrapper have different size!" 
); + + struct PipelineViewportShadingRateImageStateCreateInfoNV + { + PipelineViewportShadingRateImageStateCreateInfoNV( Bool32 shadingRateImageEnable_ = 0, + uint32_t viewportCount_ = 0, + const ShadingRatePaletteNV* pShadingRatePalettes_ = nullptr ) + : shadingRateImageEnable( shadingRateImageEnable_ ) + , viewportCount( viewportCount_ ) + , pShadingRatePalettes( pShadingRatePalettes_ ) + { + } + + PipelineViewportShadingRateImageStateCreateInfoNV( VkPipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineViewportShadingRateImageStateCreateInfoNV ) ); + } + + PipelineViewportShadingRateImageStateCreateInfoNV& operator=( VkPipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineViewportShadingRateImageStateCreateInfoNV ) ); + return *this; + } + PipelineViewportShadingRateImageStateCreateInfoNV& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PipelineViewportShadingRateImageStateCreateInfoNV& setShadingRateImageEnable( Bool32 shadingRateImageEnable_ ) + { + shadingRateImageEnable = shadingRateImageEnable_; + return *this; + } + + PipelineViewportShadingRateImageStateCreateInfoNV& setViewportCount( uint32_t viewportCount_ ) + { + viewportCount = viewportCount_; + return *this; + } + + PipelineViewportShadingRateImageStateCreateInfoNV& setPShadingRatePalettes( const ShadingRatePaletteNV* pShadingRatePalettes_ ) + { + pShadingRatePalettes = pShadingRatePalettes_; + return *this; + } + + operator VkPipelineViewportShadingRateImageStateCreateInfoNV const&() const + { + return *reinterpret_cast(this); + } + + operator VkPipelineViewportShadingRateImageStateCreateInfoNV &() + { + return *reinterpret_cast(this); + } + + bool operator==( PipelineViewportShadingRateImageStateCreateInfoNV const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( shadingRateImageEnable == rhs.shadingRateImageEnable ) + && ( viewportCount == rhs.viewportCount ) + && ( pShadingRatePalettes == rhs.pShadingRatePalettes ); + } + + bool operator!=( PipelineViewportShadingRateImageStateCreateInfoNV const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePipelineViewportShadingRateImageStateCreateInfoNV; + + public: + const void* pNext = nullptr; + Bool32 shadingRateImageEnable; + uint32_t viewportCount; + const ShadingRatePaletteNV* pShadingRatePalettes; + }; + static_assert( sizeof( PipelineViewportShadingRateImageStateCreateInfoNV ) == sizeof( VkPipelineViewportShadingRateImageStateCreateInfoNV ), "struct and wrapper have different size!" 
); + + struct CoarseSampleOrderCustomNV + { + CoarseSampleOrderCustomNV( ShadingRatePaletteEntryNV shadingRate_ = ShadingRatePaletteEntryNV::eNoInvocations, + uint32_t sampleCount_ = 0, + uint32_t sampleLocationCount_ = 0, + const CoarseSampleLocationNV* pSampleLocations_ = nullptr ) + : shadingRate( shadingRate_ ) + , sampleCount( sampleCount_ ) + , sampleLocationCount( sampleLocationCount_ ) + , pSampleLocations( pSampleLocations_ ) + { + } + + CoarseSampleOrderCustomNV( VkCoarseSampleOrderCustomNV const & rhs ) + { + memcpy( this, &rhs, sizeof( CoarseSampleOrderCustomNV ) ); + } + + CoarseSampleOrderCustomNV& operator=( VkCoarseSampleOrderCustomNV const & rhs ) + { + memcpy( this, &rhs, sizeof( CoarseSampleOrderCustomNV ) ); + return *this; + } + CoarseSampleOrderCustomNV& setShadingRate( ShadingRatePaletteEntryNV shadingRate_ ) + { + shadingRate = shadingRate_; + return *this; + } + + CoarseSampleOrderCustomNV& setSampleCount( uint32_t sampleCount_ ) + { + sampleCount = sampleCount_; + return *this; + } + + CoarseSampleOrderCustomNV& setSampleLocationCount( uint32_t sampleLocationCount_ ) + { + sampleLocationCount = sampleLocationCount_; + return *this; + } + + CoarseSampleOrderCustomNV& setPSampleLocations( const CoarseSampleLocationNV* pSampleLocations_ ) + { + pSampleLocations = pSampleLocations_; + return *this; + } + + operator VkCoarseSampleOrderCustomNV const&() const + { + return *reinterpret_cast(this); + } + + operator VkCoarseSampleOrderCustomNV &() + { + return *reinterpret_cast(this); + } + + bool operator==( CoarseSampleOrderCustomNV const& rhs ) const + { + return ( shadingRate == rhs.shadingRate ) + && ( sampleCount == rhs.sampleCount ) + && ( sampleLocationCount == rhs.sampleLocationCount ) + && ( pSampleLocations == rhs.pSampleLocations ); + } + + bool operator!=( CoarseSampleOrderCustomNV const& rhs ) const + { + return !operator==( rhs ); + } + + ShadingRatePaletteEntryNV shadingRate; + uint32_t sampleCount; + uint32_t sampleLocationCount; + const CoarseSampleLocationNV* pSampleLocations; + }; + static_assert( sizeof( CoarseSampleOrderCustomNV ) == sizeof( VkCoarseSampleOrderCustomNV ), "struct and wrapper have different size!" 
); + + enum class CoarseSampleOrderTypeNV + { + eDefault = VK_COARSE_SAMPLE_ORDER_TYPE_DEFAULT_NV, + eCustom = VK_COARSE_SAMPLE_ORDER_TYPE_CUSTOM_NV, + ePixelMajor = VK_COARSE_SAMPLE_ORDER_TYPE_PIXEL_MAJOR_NV, + eSampleMajor = VK_COARSE_SAMPLE_ORDER_TYPE_SAMPLE_MAJOR_NV + }; + + struct PipelineViewportCoarseSampleOrderStateCreateInfoNV + { + PipelineViewportCoarseSampleOrderStateCreateInfoNV( CoarseSampleOrderTypeNV sampleOrderType_ = CoarseSampleOrderTypeNV::eDefault, + uint32_t customSampleOrderCount_ = 0, + const CoarseSampleOrderCustomNV* pCustomSampleOrders_ = nullptr ) + : sampleOrderType( sampleOrderType_ ) + , customSampleOrderCount( customSampleOrderCount_ ) + , pCustomSampleOrders( pCustomSampleOrders_ ) + { + } + + PipelineViewportCoarseSampleOrderStateCreateInfoNV( VkPipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineViewportCoarseSampleOrderStateCreateInfoNV ) ); + } + + PipelineViewportCoarseSampleOrderStateCreateInfoNV& operator=( VkPipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineViewportCoarseSampleOrderStateCreateInfoNV ) ); + return *this; + } + PipelineViewportCoarseSampleOrderStateCreateInfoNV& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PipelineViewportCoarseSampleOrderStateCreateInfoNV& setSampleOrderType( CoarseSampleOrderTypeNV sampleOrderType_ ) + { + sampleOrderType = sampleOrderType_; + return *this; + } + + PipelineViewportCoarseSampleOrderStateCreateInfoNV& setCustomSampleOrderCount( uint32_t customSampleOrderCount_ ) + { + customSampleOrderCount = customSampleOrderCount_; + return *this; + } + + PipelineViewportCoarseSampleOrderStateCreateInfoNV& setPCustomSampleOrders( const CoarseSampleOrderCustomNV* pCustomSampleOrders_ ) + { + pCustomSampleOrders = pCustomSampleOrders_; + return *this; + } + + operator VkPipelineViewportCoarseSampleOrderStateCreateInfoNV const&() const + { + return *reinterpret_cast(this); + } + + operator VkPipelineViewportCoarseSampleOrderStateCreateInfoNV &() + { + return *reinterpret_cast(this); + } + + bool operator==( PipelineViewportCoarseSampleOrderStateCreateInfoNV const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( sampleOrderType == rhs.sampleOrderType ) + && ( customSampleOrderCount == rhs.customSampleOrderCount ) + && ( pCustomSampleOrders == rhs.pCustomSampleOrders ); + } + + bool operator!=( PipelineViewportCoarseSampleOrderStateCreateInfoNV const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePipelineViewportCoarseSampleOrderStateCreateInfoNV; + + public: + const void* pNext = nullptr; + CoarseSampleOrderTypeNV sampleOrderType; + uint32_t customSampleOrderCount; + const CoarseSampleOrderCustomNV* pCustomSampleOrders; + }; + static_assert( sizeof( PipelineViewportCoarseSampleOrderStateCreateInfoNV ) == sizeof( VkPipelineViewportCoarseSampleOrderStateCreateInfoNV ), "struct and wrapper have different size!" 
); + + enum class GeometryInstanceFlagBitsNV + { + eTriangleCullDisable = VK_GEOMETRY_INSTANCE_TRIANGLE_CULL_DISABLE_BIT_NV, + eTriangleFrontCounterclockwise = VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_NV, + eForceOpaque = VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_NV, + eForceNoOpaque = VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_NV + }; + + using GeometryInstanceFlagsNV = Flags; + + VULKAN_HPP_INLINE GeometryInstanceFlagsNV operator|( GeometryInstanceFlagBitsNV bit0, GeometryInstanceFlagBitsNV bit1 ) + { + return GeometryInstanceFlagsNV( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE GeometryInstanceFlagsNV operator~( GeometryInstanceFlagBitsNV bits ) + { + return ~( GeometryInstanceFlagsNV( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(GeometryInstanceFlagBitsNV::eTriangleCullDisable) | VkFlags(GeometryInstanceFlagBitsNV::eTriangleFrontCounterclockwise) | VkFlags(GeometryInstanceFlagBitsNV::eForceOpaque) | VkFlags(GeometryInstanceFlagBitsNV::eForceNoOpaque) + }; + }; + + enum class GeometryFlagBitsNV + { + eOpaque = VK_GEOMETRY_OPAQUE_BIT_NV, + eNoDuplicateAnyHitInvocation = VK_GEOMETRY_NO_DUPLICATE_ANY_HIT_INVOCATION_BIT_NV + }; + + using GeometryFlagsNV = Flags; + + VULKAN_HPP_INLINE GeometryFlagsNV operator|( GeometryFlagBitsNV bit0, GeometryFlagBitsNV bit1 ) + { + return GeometryFlagsNV( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE GeometryFlagsNV operator~( GeometryFlagBitsNV bits ) + { + return ~( GeometryFlagsNV( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(GeometryFlagBitsNV::eOpaque) | VkFlags(GeometryFlagBitsNV::eNoDuplicateAnyHitInvocation) + }; + }; + + enum class BuildAccelerationStructureFlagBitsNV + { + eAllowUpdate = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_NV, + eAllowCompaction = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_NV, + ePreferFastTrace = VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_NV, + ePreferFastBuild = VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_NV, + eLowMemory = VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_NV + }; + + using BuildAccelerationStructureFlagsNV = Flags; + + VULKAN_HPP_INLINE BuildAccelerationStructureFlagsNV operator|( BuildAccelerationStructureFlagBitsNV bit0, BuildAccelerationStructureFlagBitsNV bit1 ) + { + return BuildAccelerationStructureFlagsNV( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE BuildAccelerationStructureFlagsNV operator~( BuildAccelerationStructureFlagBitsNV bits ) + { + return ~( BuildAccelerationStructureFlagsNV( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(BuildAccelerationStructureFlagBitsNV::eAllowUpdate) | VkFlags(BuildAccelerationStructureFlagBitsNV::eAllowCompaction) | VkFlags(BuildAccelerationStructureFlagBitsNV::ePreferFastTrace) | VkFlags(BuildAccelerationStructureFlagBitsNV::ePreferFastBuild) | VkFlags(BuildAccelerationStructureFlagBitsNV::eLowMemory) + }; + }; + + enum class CopyAccelerationStructureModeNV + { + eClone = VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_NV, + eCompact = VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_NV + }; + + enum class AccelerationStructureTypeNV + { + eTopLevel = VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_NV, + eBottomLevel = VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV + }; + + enum class GeometryTypeNV + { + eTriangles = VK_GEOMETRY_TYPE_TRIANGLES_NV, + eAabbs = VK_GEOMETRY_TYPE_AABBS_NV + }; + + struct GeometryNV + { + GeometryNV( GeometryTypeNV geometryType_ = GeometryTypeNV::eTriangles, + GeometryDataNV geometry_ = GeometryDataNV(), + 
GeometryFlagsNV flags_ = GeometryFlagsNV() ) + : geometryType( geometryType_ ) + , geometry( geometry_ ) + , flags( flags_ ) + { + } + + GeometryNV( VkGeometryNV const & rhs ) + { + memcpy( this, &rhs, sizeof( GeometryNV ) ); + } + + GeometryNV& operator=( VkGeometryNV const & rhs ) + { + memcpy( this, &rhs, sizeof( GeometryNV ) ); + return *this; + } + GeometryNV& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + GeometryNV& setGeometryType( GeometryTypeNV geometryType_ ) + { + geometryType = geometryType_; + return *this; + } + + GeometryNV& setGeometry( GeometryDataNV geometry_ ) + { + geometry = geometry_; + return *this; + } + + GeometryNV& setFlags( GeometryFlagsNV flags_ ) + { + flags = flags_; + return *this; + } + + operator VkGeometryNV const&() const + { + return *reinterpret_cast(this); + } + + operator VkGeometryNV &() + { + return *reinterpret_cast(this); + } + + bool operator==( GeometryNV const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( geometryType == rhs.geometryType ) + && ( geometry == rhs.geometry ) + && ( flags == rhs.flags ); + } + + bool operator!=( GeometryNV const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eGeometryNV; + + public: + const void* pNext = nullptr; + GeometryTypeNV geometryType; + GeometryDataNV geometry; + GeometryFlagsNV flags; + }; + static_assert( sizeof( GeometryNV ) == sizeof( VkGeometryNV ), "struct and wrapper have different size!" ); + + struct AccelerationStructureInfoNV + { + AccelerationStructureInfoNV( AccelerationStructureTypeNV type_ = AccelerationStructureTypeNV::eTopLevel, + BuildAccelerationStructureFlagsNV flags_ = BuildAccelerationStructureFlagsNV(), + uint32_t instanceCount_ = 0, + uint32_t geometryCount_ = 0, + const GeometryNV* pGeometries_ = nullptr ) + : type( type_ ) + , flags( flags_ ) + , instanceCount( instanceCount_ ) + , geometryCount( geometryCount_ ) + , pGeometries( pGeometries_ ) + { + } + + AccelerationStructureInfoNV( VkAccelerationStructureInfoNV const & rhs ) + { + memcpy( this, &rhs, sizeof( AccelerationStructureInfoNV ) ); + } + + AccelerationStructureInfoNV& operator=( VkAccelerationStructureInfoNV const & rhs ) + { + memcpy( this, &rhs, sizeof( AccelerationStructureInfoNV ) ); + return *this; + } + AccelerationStructureInfoNV& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + AccelerationStructureInfoNV& setType( AccelerationStructureTypeNV type_ ) + { + type = type_; + return *this; + } + + AccelerationStructureInfoNV& setFlags( BuildAccelerationStructureFlagsNV flags_ ) + { + flags = flags_; + return *this; + } + + AccelerationStructureInfoNV& setInstanceCount( uint32_t instanceCount_ ) + { + instanceCount = instanceCount_; + return *this; + } + + AccelerationStructureInfoNV& setGeometryCount( uint32_t geometryCount_ ) + { + geometryCount = geometryCount_; + return *this; + } + + AccelerationStructureInfoNV& setPGeometries( const GeometryNV* pGeometries_ ) + { + pGeometries = pGeometries_; + return *this; + } + + operator VkAccelerationStructureInfoNV const&() const + { + return *reinterpret_cast(this); + } + + operator VkAccelerationStructureInfoNV &() + { + return *reinterpret_cast(this); + } + + bool operator==( AccelerationStructureInfoNV const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( type == rhs.type ) + && ( flags == rhs.flags ) + && ( instanceCount == rhs.instanceCount ) + && ( geometryCount == 
rhs.geometryCount ) + && ( pGeometries == rhs.pGeometries ); + } + + bool operator!=( AccelerationStructureInfoNV const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eAccelerationStructureInfoNV; + + public: + const void* pNext = nullptr; + AccelerationStructureTypeNV type; + BuildAccelerationStructureFlagsNV flags; + uint32_t instanceCount; + uint32_t geometryCount; + const GeometryNV* pGeometries; + }; + static_assert( sizeof( AccelerationStructureInfoNV ) == sizeof( VkAccelerationStructureInfoNV ), "struct and wrapper have different size!" ); + + struct AccelerationStructureCreateInfoNV + { + AccelerationStructureCreateInfoNV( DeviceSize compactedSize_ = 0, + AccelerationStructureInfoNV info_ = AccelerationStructureInfoNV() ) + : compactedSize( compactedSize_ ) + , info( info_ ) + { + } + + AccelerationStructureCreateInfoNV( VkAccelerationStructureCreateInfoNV const & rhs ) + { + memcpy( this, &rhs, sizeof( AccelerationStructureCreateInfoNV ) ); + } + + AccelerationStructureCreateInfoNV& operator=( VkAccelerationStructureCreateInfoNV const & rhs ) + { + memcpy( this, &rhs, sizeof( AccelerationStructureCreateInfoNV ) ); + return *this; + } + AccelerationStructureCreateInfoNV& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + AccelerationStructureCreateInfoNV& setCompactedSize( DeviceSize compactedSize_ ) + { + compactedSize = compactedSize_; + return *this; + } + + AccelerationStructureCreateInfoNV& setInfo( AccelerationStructureInfoNV info_ ) + { + info = info_; + return *this; + } + + operator VkAccelerationStructureCreateInfoNV const&() const + { + return *reinterpret_cast(this); + } + + operator VkAccelerationStructureCreateInfoNV &() + { + return *reinterpret_cast(this); + } + + bool operator==( AccelerationStructureCreateInfoNV const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( compactedSize == rhs.compactedSize ) + && ( info == rhs.info ); + } + + bool operator!=( AccelerationStructureCreateInfoNV const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eAccelerationStructureCreateInfoNV; + + public: + const void* pNext = nullptr; + DeviceSize compactedSize; + AccelerationStructureInfoNV info; + }; + static_assert( sizeof( AccelerationStructureCreateInfoNV ) == sizeof( VkAccelerationStructureCreateInfoNV ), "struct and wrapper have different size!" 
); + + enum class AccelerationStructureMemoryRequirementsTypeNV + { + eObject = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV, + eBuildScratch = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV, + eUpdateScratch = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV + }; + + struct AccelerationStructureMemoryRequirementsInfoNV + { + AccelerationStructureMemoryRequirementsInfoNV( AccelerationStructureMemoryRequirementsTypeNV type_ = AccelerationStructureMemoryRequirementsTypeNV::eObject, + AccelerationStructureNV accelerationStructure_ = AccelerationStructureNV() ) + : type( type_ ) + , accelerationStructure( accelerationStructure_ ) + { + } + + AccelerationStructureMemoryRequirementsInfoNV( VkAccelerationStructureMemoryRequirementsInfoNV const & rhs ) + { + memcpy( this, &rhs, sizeof( AccelerationStructureMemoryRequirementsInfoNV ) ); + } + + AccelerationStructureMemoryRequirementsInfoNV& operator=( VkAccelerationStructureMemoryRequirementsInfoNV const & rhs ) + { + memcpy( this, &rhs, sizeof( AccelerationStructureMemoryRequirementsInfoNV ) ); + return *this; + } + AccelerationStructureMemoryRequirementsInfoNV& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + AccelerationStructureMemoryRequirementsInfoNV& setType( AccelerationStructureMemoryRequirementsTypeNV type_ ) + { + type = type_; + return *this; + } + + AccelerationStructureMemoryRequirementsInfoNV& setAccelerationStructure( AccelerationStructureNV accelerationStructure_ ) + { + accelerationStructure = accelerationStructure_; + return *this; + } + + operator VkAccelerationStructureMemoryRequirementsInfoNV const&() const + { + return *reinterpret_cast(this); + } + + operator VkAccelerationStructureMemoryRequirementsInfoNV &() + { + return *reinterpret_cast(this); + } + + bool operator==( AccelerationStructureMemoryRequirementsInfoNV const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( type == rhs.type ) + && ( accelerationStructure == rhs.accelerationStructure ); + } + + bool operator!=( AccelerationStructureMemoryRequirementsInfoNV const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eAccelerationStructureMemoryRequirementsInfoNV; + + public: + const void* pNext = nullptr; + AccelerationStructureMemoryRequirementsTypeNV type; + AccelerationStructureNV accelerationStructure; + }; + static_assert( sizeof( AccelerationStructureMemoryRequirementsInfoNV ) == sizeof( VkAccelerationStructureMemoryRequirementsInfoNV ), "struct and wrapper have different size!" 
); + + enum class RayTracingShaderGroupTypeNV + { + eGeneral = VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_NV, + eTrianglesHitGroup = VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_NV, + eProceduralHitGroup = VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_NV + }; + + struct RayTracingShaderGroupCreateInfoNV + { + RayTracingShaderGroupCreateInfoNV( RayTracingShaderGroupTypeNV type_ = RayTracingShaderGroupTypeNV::eGeneral, + uint32_t generalShader_ = 0, + uint32_t closestHitShader_ = 0, + uint32_t anyHitShader_ = 0, + uint32_t intersectionShader_ = 0 ) + : type( type_ ) + , generalShader( generalShader_ ) + , closestHitShader( closestHitShader_ ) + , anyHitShader( anyHitShader_ ) + , intersectionShader( intersectionShader_ ) + { + } + + RayTracingShaderGroupCreateInfoNV( VkRayTracingShaderGroupCreateInfoNV const & rhs ) + { + memcpy( this, &rhs, sizeof( RayTracingShaderGroupCreateInfoNV ) ); + } + + RayTracingShaderGroupCreateInfoNV& operator=( VkRayTracingShaderGroupCreateInfoNV const & rhs ) + { + memcpy( this, &rhs, sizeof( RayTracingShaderGroupCreateInfoNV ) ); + return *this; + } + RayTracingShaderGroupCreateInfoNV& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + RayTracingShaderGroupCreateInfoNV& setType( RayTracingShaderGroupTypeNV type_ ) + { + type = type_; + return *this; + } + + RayTracingShaderGroupCreateInfoNV& setGeneralShader( uint32_t generalShader_ ) + { + generalShader = generalShader_; + return *this; + } + + RayTracingShaderGroupCreateInfoNV& setClosestHitShader( uint32_t closestHitShader_ ) + { + closestHitShader = closestHitShader_; + return *this; + } + + RayTracingShaderGroupCreateInfoNV& setAnyHitShader( uint32_t anyHitShader_ ) + { + anyHitShader = anyHitShader_; + return *this; + } + + RayTracingShaderGroupCreateInfoNV& setIntersectionShader( uint32_t intersectionShader_ ) + { + intersectionShader = intersectionShader_; + return *this; + } + + operator VkRayTracingShaderGroupCreateInfoNV const&() const + { + return *reinterpret_cast(this); + } + + operator VkRayTracingShaderGroupCreateInfoNV &() + { + return *reinterpret_cast(this); + } + + bool operator==( RayTracingShaderGroupCreateInfoNV const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( type == rhs.type ) + && ( generalShader == rhs.generalShader ) + && ( closestHitShader == rhs.closestHitShader ) + && ( anyHitShader == rhs.anyHitShader ) + && ( intersectionShader == rhs.intersectionShader ); + } + + bool operator!=( RayTracingShaderGroupCreateInfoNV const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eRayTracingShaderGroupCreateInfoNV; + + public: + const void* pNext = nullptr; + RayTracingShaderGroupTypeNV type; + uint32_t generalShader; + uint32_t closestHitShader; + uint32_t anyHitShader; + uint32_t intersectionShader; + }; + static_assert( sizeof( RayTracingShaderGroupCreateInfoNV ) == sizeof( VkRayTracingShaderGroupCreateInfoNV ), "struct and wrapper have different size!" 
); + + struct RayTracingPipelineCreateInfoNV + { + RayTracingPipelineCreateInfoNV( PipelineCreateFlags flags_ = PipelineCreateFlags(), + uint32_t stageCount_ = 0, + const PipelineShaderStageCreateInfo* pStages_ = nullptr, + uint32_t groupCount_ = 0, + const RayTracingShaderGroupCreateInfoNV* pGroups_ = nullptr, + uint32_t maxRecursionDepth_ = 0, + PipelineLayout layout_ = PipelineLayout(), + Pipeline basePipelineHandle_ = Pipeline(), + int32_t basePipelineIndex_ = 0 ) + : flags( flags_ ) + , stageCount( stageCount_ ) + , pStages( pStages_ ) + , groupCount( groupCount_ ) + , pGroups( pGroups_ ) + , maxRecursionDepth( maxRecursionDepth_ ) + , layout( layout_ ) + , basePipelineHandle( basePipelineHandle_ ) + , basePipelineIndex( basePipelineIndex_ ) + { + } + + RayTracingPipelineCreateInfoNV( VkRayTracingPipelineCreateInfoNV const & rhs ) + { + memcpy( this, &rhs, sizeof( RayTracingPipelineCreateInfoNV ) ); + } + + RayTracingPipelineCreateInfoNV& operator=( VkRayTracingPipelineCreateInfoNV const & rhs ) + { + memcpy( this, &rhs, sizeof( RayTracingPipelineCreateInfoNV ) ); + return *this; + } + RayTracingPipelineCreateInfoNV& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + RayTracingPipelineCreateInfoNV& setFlags( PipelineCreateFlags flags_ ) + { + flags = flags_; + return *this; + } + + RayTracingPipelineCreateInfoNV& setStageCount( uint32_t stageCount_ ) + { + stageCount = stageCount_; + return *this; + } + + RayTracingPipelineCreateInfoNV& setPStages( const PipelineShaderStageCreateInfo* pStages_ ) + { + pStages = pStages_; + return *this; + } + + RayTracingPipelineCreateInfoNV& setGroupCount( uint32_t groupCount_ ) + { + groupCount = groupCount_; + return *this; + } + + RayTracingPipelineCreateInfoNV& setPGroups( const RayTracingShaderGroupCreateInfoNV* pGroups_ ) + { + pGroups = pGroups_; + return *this; + } + + RayTracingPipelineCreateInfoNV& setMaxRecursionDepth( uint32_t maxRecursionDepth_ ) + { + maxRecursionDepth = maxRecursionDepth_; + return *this; + } + + RayTracingPipelineCreateInfoNV& setLayout( PipelineLayout layout_ ) + { + layout = layout_; + return *this; + } + + RayTracingPipelineCreateInfoNV& setBasePipelineHandle( Pipeline basePipelineHandle_ ) + { + basePipelineHandle = basePipelineHandle_; + return *this; + } + + RayTracingPipelineCreateInfoNV& setBasePipelineIndex( int32_t basePipelineIndex_ ) + { + basePipelineIndex = basePipelineIndex_; + return *this; + } + + operator VkRayTracingPipelineCreateInfoNV const&() const + { + return *reinterpret_cast(this); + } + + operator VkRayTracingPipelineCreateInfoNV &() + { + return *reinterpret_cast(this); + } + + bool operator==( RayTracingPipelineCreateInfoNV const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( stageCount == rhs.stageCount ) + && ( pStages == rhs.pStages ) + && ( groupCount == rhs.groupCount ) + && ( pGroups == rhs.pGroups ) + && ( maxRecursionDepth == rhs.maxRecursionDepth ) + && ( layout == rhs.layout ) + && ( basePipelineHandle == rhs.basePipelineHandle ) + && ( basePipelineIndex == rhs.basePipelineIndex ); + } + + bool operator!=( RayTracingPipelineCreateInfoNV const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eRayTracingPipelineCreateInfoNV; + + public: + const void* pNext = nullptr; + PipelineCreateFlags flags; + uint32_t stageCount; + const PipelineShaderStageCreateInfo* pStages; + uint32_t groupCount; + const RayTracingShaderGroupCreateInfoNV* pGroups; + 
uint32_t maxRecursionDepth; + PipelineLayout layout; + Pipeline basePipelineHandle; + int32_t basePipelineIndex; + }; + static_assert( sizeof( RayTracingPipelineCreateInfoNV ) == sizeof( VkRayTracingPipelineCreateInfoNV ), "struct and wrapper have different size!" ); + + enum class MemoryOverallocationBehaviorAMD + { + eDefault = VK_MEMORY_OVERALLOCATION_BEHAVIOR_DEFAULT_AMD, + eAllowed = VK_MEMORY_OVERALLOCATION_BEHAVIOR_ALLOWED_AMD, + eDisallowed = VK_MEMORY_OVERALLOCATION_BEHAVIOR_DISALLOWED_AMD + }; + + struct DeviceMemoryOverallocationCreateInfoAMD + { + DeviceMemoryOverallocationCreateInfoAMD( MemoryOverallocationBehaviorAMD overallocationBehavior_ = MemoryOverallocationBehaviorAMD::eDefault ) + : overallocationBehavior( overallocationBehavior_ ) + { + } + + DeviceMemoryOverallocationCreateInfoAMD( VkDeviceMemoryOverallocationCreateInfoAMD const & rhs ) + { + memcpy( this, &rhs, sizeof( DeviceMemoryOverallocationCreateInfoAMD ) ); + } + + DeviceMemoryOverallocationCreateInfoAMD& operator=( VkDeviceMemoryOverallocationCreateInfoAMD const & rhs ) + { + memcpy( this, &rhs, sizeof( DeviceMemoryOverallocationCreateInfoAMD ) ); + return *this; + } + DeviceMemoryOverallocationCreateInfoAMD& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + DeviceMemoryOverallocationCreateInfoAMD& setOverallocationBehavior( MemoryOverallocationBehaviorAMD overallocationBehavior_ ) + { + overallocationBehavior = overallocationBehavior_; + return *this; + } + + operator VkDeviceMemoryOverallocationCreateInfoAMD const&() const + { + return *reinterpret_cast(this); + } + + operator VkDeviceMemoryOverallocationCreateInfoAMD &() + { + return *reinterpret_cast(this); + } + + bool operator==( DeviceMemoryOverallocationCreateInfoAMD const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( overallocationBehavior == rhs.overallocationBehavior ); + } + + bool operator!=( DeviceMemoryOverallocationCreateInfoAMD const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eDeviceMemoryOverallocationCreateInfoAMD; + + public: + const void* pNext = nullptr; + MemoryOverallocationBehaviorAMD overallocationBehavior; + }; + static_assert( sizeof( DeviceMemoryOverallocationCreateInfoAMD ) == sizeof( VkDeviceMemoryOverallocationCreateInfoAMD ), "struct and wrapper have different size!" 
); + + template + Result enumerateInstanceVersion( uint32_t* pApiVersion, Dispatch const &d = Dispatch() ); +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type enumerateInstanceVersion(Dispatch const &d = Dispatch() ); +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result enumerateInstanceVersion( uint32_t* pApiVersion, Dispatch const &d) + { + return static_cast( d.vkEnumerateInstanceVersion( pApiVersion ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type enumerateInstanceVersion(Dispatch const &d ) + { + uint32_t apiVersion; + Result result = static_cast( d.vkEnumerateInstanceVersion( &apiVersion ) ); + return createResultValue( result, apiVersion, VULKAN_HPP_NAMESPACE_STRING"::enumerateInstanceVersion" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + + template + Result enumerateInstanceLayerProperties( uint32_t* pPropertyCount, LayerProperties* pProperties, Dispatch const &d = Dispatch() ); +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = DispatchLoaderStatic> + typename ResultValueType>::type enumerateInstanceLayerProperties(Dispatch const &d = Dispatch() ); + template , typename Dispatch = DispatchLoaderStatic> + typename ResultValueType>::type enumerateInstanceLayerProperties(Allocator const& vectorAllocator, Dispatch const &d ); +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result enumerateInstanceLayerProperties( uint32_t* pPropertyCount, LayerProperties* pProperties, Dispatch const &d) + { + return static_cast( d.vkEnumerateInstanceLayerProperties( pPropertyCount, reinterpret_cast( pProperties ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType>::type enumerateInstanceLayerProperties(Dispatch const &d ) + { + std::vector properties; + uint32_t propertyCount; + Result result; + do + { + result = static_cast( d.vkEnumerateInstanceLayerProperties( &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkEnumerateInstanceLayerProperties( &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + properties.resize( propertyCount ); + return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::enumerateInstanceLayerProperties" ); + } + template + VULKAN_HPP_INLINE typename ResultValueType>::type enumerateInstanceLayerProperties(Allocator const& vectorAllocator, Dispatch const &d ) + { + std::vector properties( vectorAllocator ); + uint32_t propertyCount; + Result result; + do + { + result = static_cast( d.vkEnumerateInstanceLayerProperties( &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkEnumerateInstanceLayerProperties( &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + properties.resize( propertyCount ); + return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::enumerateInstanceLayerProperties" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + + template + Result enumerateInstanceExtensionProperties( const char* pLayerName, uint32_t* pPropertyCount, ExtensionProperties* pProperties, 
Dispatch const &d = Dispatch() ); +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = DispatchLoaderStatic> + typename ResultValueType>::type enumerateInstanceExtensionProperties( Optional layerName = nullptr, Dispatch const &d = Dispatch() ); + template , typename Dispatch = DispatchLoaderStatic> + typename ResultValueType>::type enumerateInstanceExtensionProperties( Optional layerName, Allocator const& vectorAllocator, Dispatch const &d ); +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result enumerateInstanceExtensionProperties( const char* pLayerName, uint32_t* pPropertyCount, ExtensionProperties* pProperties, Dispatch const &d) + { + return static_cast( d.vkEnumerateInstanceExtensionProperties( pLayerName, pPropertyCount, reinterpret_cast( pProperties ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType>::type enumerateInstanceExtensionProperties( Optional layerName, Dispatch const &d ) + { + std::vector properties; + uint32_t propertyCount; + Result result; + do + { + result = static_cast( d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + properties.resize( propertyCount ); + return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::enumerateInstanceExtensionProperties" ); + } + template + VULKAN_HPP_INLINE typename ResultValueType>::type enumerateInstanceExtensionProperties( Optional layerName, Allocator const& vectorAllocator, Dispatch const &d ) + { + std::vector properties( vectorAllocator ); + uint32_t propertyCount; + Result result; + do + { + result = static_cast( d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkEnumerateInstanceExtensionProperties( layerName ? 
layerName->c_str() : nullptr, &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + properties.resize( propertyCount ); + return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::enumerateInstanceExtensionProperties" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + + // forward declarations + struct CmdProcessCommandsInfoNVX; + + class CommandBuffer + { + public: + VULKAN_HPP_CONSTEXPR CommandBuffer() + : m_commandBuffer(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR CommandBuffer( std::nullptr_t ) + : m_commandBuffer(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT CommandBuffer( VkCommandBuffer commandBuffer ) + : m_commandBuffer( commandBuffer ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + CommandBuffer & operator=(VkCommandBuffer commandBuffer) + { + m_commandBuffer = commandBuffer; + return *this; + } +#endif + + CommandBuffer & operator=( std::nullptr_t ) + { + m_commandBuffer = VK_NULL_HANDLE; + return *this; + } + + bool operator==( CommandBuffer const & rhs ) const + { + return m_commandBuffer == rhs.m_commandBuffer; + } + + bool operator!=(CommandBuffer const & rhs ) const + { + return m_commandBuffer != rhs.m_commandBuffer; + } + + bool operator<(CommandBuffer const & rhs ) const + { + return m_commandBuffer < rhs.m_commandBuffer; + } + + template + Result begin( const CommandBufferBeginInfo* pBeginInfo, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type begin( const CommandBufferBeginInfo & beginInfo, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + Result end(Dispatch const &d = Dispatch() ) const; +#else + template + ResultValueType::type end(Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + Result reset( CommandBufferResetFlags flags, Dispatch const &d = Dispatch() ) const; +#else + template + ResultValueType::type reset( CommandBufferResetFlags flags, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void bindPipeline( PipelineBindPoint pipelineBindPoint, Pipeline pipeline, Dispatch const &d = Dispatch() ) const; + + template + void setViewport( uint32_t firstViewport, uint32_t viewportCount, const Viewport* pViewports, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setViewport( uint32_t firstViewport, ArrayProxy viewports, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void setScissor( uint32_t firstScissor, uint32_t scissorCount, const Rect2D* pScissors, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setScissor( uint32_t firstScissor, ArrayProxy scissors, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void setLineWidth( float lineWidth, Dispatch const &d = Dispatch() ) const; + + template + void setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor, Dispatch const &d = Dispatch() ) const; + + template + void setBlendConstants( const float blendConstants[4], Dispatch const &d = Dispatch() ) const; + + template + void setDepthBounds( float minDepthBounds, float maxDepthBounds, 
Dispatch const &d = Dispatch() ) const; + + template + void setStencilCompareMask( StencilFaceFlags faceMask, uint32_t compareMask, Dispatch const &d = Dispatch() ) const; + + template + void setStencilWriteMask( StencilFaceFlags faceMask, uint32_t writeMask, Dispatch const &d = Dispatch() ) const; + + template + void setStencilReference( StencilFaceFlags faceMask, uint32_t reference, Dispatch const &d = Dispatch() ) const; + + template + void bindDescriptorSets( PipelineBindPoint pipelineBindPoint, PipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const DescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void bindDescriptorSets( PipelineBindPoint pipelineBindPoint, PipelineLayout layout, uint32_t firstSet, ArrayProxy descriptorSets, ArrayProxy dynamicOffsets, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void bindIndexBuffer( Buffer buffer, DeviceSize offset, IndexType indexType, Dispatch const &d = Dispatch() ) const; + + template + void bindVertexBuffers( uint32_t firstBinding, uint32_t bindingCount, const Buffer* pBuffers, const DeviceSize* pOffsets, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void bindVertexBuffers( uint32_t firstBinding, ArrayProxy buffers, ArrayProxy offsets, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void draw( uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance, Dispatch const &d = Dispatch() ) const; + + template + void drawIndexed( uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance, Dispatch const &d = Dispatch() ) const; + + template + void drawIndirect( Buffer buffer, DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d = Dispatch() ) const; + + template + void drawIndexedIndirect( Buffer buffer, DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d = Dispatch() ) const; + + template + void dispatch( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d = Dispatch() ) const; + + template + void dispatchIndirect( Buffer buffer, DeviceSize offset, Dispatch const &d = Dispatch() ) const; + + template + void copyBuffer( Buffer srcBuffer, Buffer dstBuffer, uint32_t regionCount, const BufferCopy* pRegions, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void copyBuffer( Buffer srcBuffer, Buffer dstBuffer, ArrayProxy regions, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void copyImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, uint32_t regionCount, const ImageCopy* pRegions, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void copyImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, ArrayProxy regions, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void blitImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, uint32_t regionCount, const ImageBlit* pRegions, Filter filter, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + 
template + void blitImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, ArrayProxy regions, Filter filter, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void copyBufferToImage( Buffer srcBuffer, Image dstImage, ImageLayout dstImageLayout, uint32_t regionCount, const BufferImageCopy* pRegions, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void copyBufferToImage( Buffer srcBuffer, Image dstImage, ImageLayout dstImageLayout, ArrayProxy regions, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void copyImageToBuffer( Image srcImage, ImageLayout srcImageLayout, Buffer dstBuffer, uint32_t regionCount, const BufferImageCopy* pRegions, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void copyImageToBuffer( Image srcImage, ImageLayout srcImageLayout, Buffer dstBuffer, ArrayProxy regions, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void updateBuffer( Buffer dstBuffer, DeviceSize dstOffset, DeviceSize dataSize, const void* pData, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void updateBuffer( Buffer dstBuffer, DeviceSize dstOffset, ArrayProxy data, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void fillBuffer( Buffer dstBuffer, DeviceSize dstOffset, DeviceSize size, uint32_t data, Dispatch const &d = Dispatch() ) const; + + template + void clearColorImage( Image image, ImageLayout imageLayout, const ClearColorValue* pColor, uint32_t rangeCount, const ImageSubresourceRange* pRanges, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void clearColorImage( Image image, ImageLayout imageLayout, const ClearColorValue & color, ArrayProxy ranges, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void clearDepthStencilImage( Image image, ImageLayout imageLayout, const ClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const ImageSubresourceRange* pRanges, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void clearDepthStencilImage( Image image, ImageLayout imageLayout, const ClearDepthStencilValue & depthStencil, ArrayProxy ranges, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void clearAttachments( uint32_t attachmentCount, const ClearAttachment* pAttachments, uint32_t rectCount, const ClearRect* pRects, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void clearAttachments( ArrayProxy attachments, ArrayProxy rects, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void resolveImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, uint32_t regionCount, const ImageResolve* pRegions, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void resolveImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, ArrayProxy regions, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void setEvent( Event event, PipelineStageFlags stageMask, Dispatch const &d = 
Dispatch() ) const; + + template + void resetEvent( Event event, PipelineStageFlags stageMask, Dispatch const &d = Dispatch() ) const; + + template + void waitEvents( uint32_t eventCount, const Event* pEvents, PipelineStageFlags srcStageMask, PipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const ImageMemoryBarrier* pImageMemoryBarriers, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void waitEvents( ArrayProxy events, PipelineStageFlags srcStageMask, PipelineStageFlags dstStageMask, ArrayProxy memoryBarriers, ArrayProxy bufferMemoryBarriers, ArrayProxy imageMemoryBarriers, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void pipelineBarrier( PipelineStageFlags srcStageMask, PipelineStageFlags dstStageMask, DependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const ImageMemoryBarrier* pImageMemoryBarriers, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void pipelineBarrier( PipelineStageFlags srcStageMask, PipelineStageFlags dstStageMask, DependencyFlags dependencyFlags, ArrayProxy memoryBarriers, ArrayProxy bufferMemoryBarriers, ArrayProxy imageMemoryBarriers, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void beginQuery( QueryPool queryPool, uint32_t query, QueryControlFlags flags, Dispatch const &d = Dispatch() ) const; + + template + void endQuery( QueryPool queryPool, uint32_t query, Dispatch const &d = Dispatch() ) const; + + template + void beginConditionalRenderingEXT( const ConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void beginConditionalRenderingEXT( const ConditionalRenderingBeginInfoEXT & conditionalRenderingBegin, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void endConditionalRenderingEXT(Dispatch const &d = Dispatch() ) const; + + template + void resetQueryPool( QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d = Dispatch() ) const; + + template + void writeTimestamp( PipelineStageFlagBits pipelineStage, QueryPool queryPool, uint32_t query, Dispatch const &d = Dispatch() ) const; + + template + void copyQueryPoolResults( QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Buffer dstBuffer, DeviceSize dstOffset, DeviceSize stride, QueryResultFlags flags, Dispatch const &d = Dispatch() ) const; + + template + void pushConstants( PipelineLayout layout, ShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void* pValues, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void pushConstants( PipelineLayout layout, ShaderStageFlags stageFlags, uint32_t offset, ArrayProxy values, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void beginRenderPass( const RenderPassBeginInfo* pRenderPassBegin, SubpassContents contents, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void beginRenderPass( const 
RenderPassBeginInfo & renderPassBegin, SubpassContents contents, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void nextSubpass( SubpassContents contents, Dispatch const &d = Dispatch() ) const; + + template + void endRenderPass(Dispatch const &d = Dispatch() ) const; + + template + void executeCommands( uint32_t commandBufferCount, const CommandBuffer* pCommandBuffers, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void executeCommands( ArrayProxy commandBuffers, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void debugMarkerBeginEXT( const DebugMarkerMarkerInfoEXT* pMarkerInfo, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void debugMarkerBeginEXT( const DebugMarkerMarkerInfoEXT & markerInfo, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void debugMarkerEndEXT(Dispatch const &d = Dispatch() ) const; + + template + void debugMarkerInsertEXT( const DebugMarkerMarkerInfoEXT* pMarkerInfo, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void debugMarkerInsertEXT( const DebugMarkerMarkerInfoEXT & markerInfo, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void drawIndirectCountAMD( Buffer buffer, DeviceSize offset, Buffer countBuffer, DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d = Dispatch() ) const; + + template + void drawIndexedIndirectCountAMD( Buffer buffer, DeviceSize offset, Buffer countBuffer, DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d = Dispatch() ) const; + + template + void processCommandsNVX( const CmdProcessCommandsInfoNVX* pProcessCommandsInfo, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void processCommandsNVX( const CmdProcessCommandsInfoNVX & processCommandsInfo, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void reserveSpaceForCommandsNVX( const CmdReserveSpaceForCommandsInfoNVX* pReserveSpaceInfo, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void reserveSpaceForCommandsNVX( const CmdReserveSpaceForCommandsInfoNVX & reserveSpaceInfo, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void pushDescriptorSetKHR( PipelineBindPoint pipelineBindPoint, PipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const WriteDescriptorSet* pDescriptorWrites, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void pushDescriptorSetKHR( PipelineBindPoint pipelineBindPoint, PipelineLayout layout, uint32_t set, ArrayProxy descriptorWrites, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void setDeviceMask( uint32_t deviceMask, Dispatch const &d = Dispatch() ) const; + + template + void setDeviceMaskKHR( uint32_t deviceMask, Dispatch const &d = Dispatch() ) const; + + template + void dispatchBase( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d = Dispatch() ) const; + + template + void dispatchBaseKHR( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t 
baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d = Dispatch() ) const; + + template + void pushDescriptorSetWithTemplateKHR( DescriptorUpdateTemplate descriptorUpdateTemplate, PipelineLayout layout, uint32_t set, const void* pData, Dispatch const &d = Dispatch() ) const; + + template + void setViewportWScalingNV( uint32_t firstViewport, uint32_t viewportCount, const ViewportWScalingNV* pViewportWScalings, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setViewportWScalingNV( uint32_t firstViewport, ArrayProxy viewportWScalings, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void setDiscardRectangleEXT( uint32_t firstDiscardRectangle, uint32_t discardRectangleCount, const Rect2D* pDiscardRectangles, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setDiscardRectangleEXT( uint32_t firstDiscardRectangle, ArrayProxy discardRectangles, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void setSampleLocationsEXT( const SampleLocationsInfoEXT* pSampleLocationsInfo, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setSampleLocationsEXT( const SampleLocationsInfoEXT & sampleLocationsInfo, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void endDebugUtilsLabelEXT(Dispatch const &d = Dispatch() ) const; + + template + void insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void writeBufferMarkerAMD( PipelineStageFlagBits pipelineStage, Buffer dstBuffer, DeviceSize dstOffset, uint32_t marker, Dispatch const &d = Dispatch() ) const; + + template + void beginRenderPass2KHR( const RenderPassBeginInfo* pRenderPassBegin, const SubpassBeginInfoKHR* pSubpassBeginInfo, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void beginRenderPass2KHR( const RenderPassBeginInfo & renderPassBegin, const SubpassBeginInfoKHR & subpassBeginInfo, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void nextSubpass2KHR( const SubpassBeginInfoKHR* pSubpassBeginInfo, const SubpassEndInfoKHR* pSubpassEndInfo, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void nextSubpass2KHR( const SubpassBeginInfoKHR & subpassBeginInfo, const SubpassEndInfoKHR & subpassEndInfo, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void endRenderPass2KHR( const SubpassEndInfoKHR* pSubpassEndInfo, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void endRenderPass2KHR( const SubpassEndInfoKHR & subpassEndInfo, Dispatch const &d = Dispatch() ) const; +#endif 
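// Editorial note (not part of the upstream vulkan.hpp): every member above and below is
// templated on a Dispatch class and takes Dispatch const &d = Dispatch() as its final
// parameter. With the default dispatcher the call is forwarded to the statically linked
// entry point (d.vkCmdDraw, d.vkCmdDispatch, ...). Extension commands such as the EXT/NV/KHR
// members are usually recorded through a dynamic dispatcher whose function pointers are
// fetched at runtime. A sketch, assuming existing instance, device and cmd handles and a
// Vulkan-Hpp build that provides vk::DispatchLoaderDynamic:
//
//   vk::DispatchLoaderDynamic dldy( instance, device );   // loads the vkCmd* entry points
//   cmd.setDeviceMaskKHR( 1, dldy );                       // record via the dynamic table
//
// Passing a dispatcher explicitly matters whenever the extension entry point is not exported
// by the loader library the application links against.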
/*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void drawIndirectCountKHR( Buffer buffer, DeviceSize offset, Buffer countBuffer, DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d = Dispatch() ) const; + + template + void drawIndexedIndirectCountKHR( Buffer buffer, DeviceSize offset, Buffer countBuffer, DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d = Dispatch() ) const; + + template + void setCheckpointNV( const void* pCheckpointMarker, Dispatch const &d = Dispatch() ) const; + + template + void bindTransformFeedbackBuffersEXT( uint32_t firstBinding, uint32_t bindingCount, const Buffer* pBuffers, const DeviceSize* pOffsets, const DeviceSize* pSizes, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void bindTransformFeedbackBuffersEXT( uint32_t firstBinding, ArrayProxy buffers, ArrayProxy offsets, ArrayProxy sizes, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void beginTransformFeedbackEXT( uint32_t firstCounterBuffer, uint32_t counterBufferCount, const Buffer* pCounterBuffers, const DeviceSize* pCounterBufferOffsets, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void beginTransformFeedbackEXT( uint32_t firstCounterBuffer, ArrayProxy counterBuffers, ArrayProxy counterBufferOffsets, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void endTransformFeedbackEXT( uint32_t firstCounterBuffer, uint32_t counterBufferCount, const Buffer* pCounterBuffers, const DeviceSize* pCounterBufferOffsets, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void endTransformFeedbackEXT( uint32_t firstCounterBuffer, ArrayProxy counterBuffers, ArrayProxy counterBufferOffsets, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void beginQueryIndexedEXT( QueryPool queryPool, uint32_t query, QueryControlFlags flags, uint32_t index, Dispatch const &d = Dispatch() ) const; + + template + void endQueryIndexedEXT( QueryPool queryPool, uint32_t query, uint32_t index, Dispatch const &d = Dispatch() ) const; + + template + void drawIndirectByteCountEXT( uint32_t instanceCount, uint32_t firstInstance, Buffer counterBuffer, DeviceSize counterBufferOffset, uint32_t counterOffset, uint32_t vertexStride, Dispatch const &d = Dispatch() ) const; + + template + void setExclusiveScissorNV( uint32_t firstExclusiveScissor, uint32_t exclusiveScissorCount, const Rect2D* pExclusiveScissors, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setExclusiveScissorNV( uint32_t firstExclusiveScissor, ArrayProxy exclusiveScissors, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void bindShadingRateImageNV( ImageView imageView, ImageLayout imageLayout, Dispatch const &d = Dispatch() ) const; + + template + void setViewportShadingRatePaletteNV( uint32_t firstViewport, uint32_t viewportCount, const ShadingRatePaletteNV* pShadingRatePalettes, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setViewportShadingRatePaletteNV( uint32_t firstViewport, ArrayProxy shadingRatePalettes, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void setCoarseSampleOrderNV( CoarseSampleOrderTypeNV 
sampleOrderType, uint32_t customSampleOrderCount, const CoarseSampleOrderCustomNV* pCustomSampleOrders, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setCoarseSampleOrderNV( CoarseSampleOrderTypeNV sampleOrderType, ArrayProxy customSampleOrders, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void drawMeshTasksNV( uint32_t taskCount, uint32_t firstTask, Dispatch const &d = Dispatch() ) const; + + template + void drawMeshTasksIndirectNV( Buffer buffer, DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d = Dispatch() ) const; + + template + void drawMeshTasksIndirectCountNV( Buffer buffer, DeviceSize offset, Buffer countBuffer, DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d = Dispatch() ) const; + + template + void copyAccelerationStructureNV( AccelerationStructureNV dst, AccelerationStructureNV src, CopyAccelerationStructureModeNV mode, Dispatch const &d = Dispatch() ) const; + + template + void writeAccelerationStructuresPropertiesNV( uint32_t accelerationStructureCount, const AccelerationStructureNV* pAccelerationStructures, QueryType queryType, QueryPool queryPool, uint32_t firstQuery, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void writeAccelerationStructuresPropertiesNV( ArrayProxy accelerationStructures, QueryType queryType, QueryPool queryPool, uint32_t firstQuery, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void buildAccelerationStructureNV( const AccelerationStructureInfoNV* pInfo, Buffer instanceData, DeviceSize instanceOffset, Bool32 update, AccelerationStructureNV dst, AccelerationStructureNV src, Buffer scratch, DeviceSize scratchOffset, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void buildAccelerationStructureNV( const AccelerationStructureInfoNV & info, Buffer instanceData, DeviceSize instanceOffset, Bool32 update, AccelerationStructureNV dst, AccelerationStructureNV src, Buffer scratch, DeviceSize scratchOffset, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void traceRaysNV( Buffer raygenShaderBindingTableBuffer, DeviceSize raygenShaderBindingOffset, Buffer missShaderBindingTableBuffer, DeviceSize missShaderBindingOffset, DeviceSize missShaderBindingStride, Buffer hitShaderBindingTableBuffer, DeviceSize hitShaderBindingOffset, DeviceSize hitShaderBindingStride, Buffer callableShaderBindingTableBuffer, DeviceSize callableShaderBindingOffset, DeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth, Dispatch const &d = Dispatch() ) const; + + + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkCommandBuffer() const + { + return m_commandBuffer; + } + + explicit operator bool() const + { + return m_commandBuffer != VK_NULL_HANDLE; + } + + bool operator!() const + { + return m_commandBuffer == VK_NULL_HANDLE; + } + + private: + VkCommandBuffer m_commandBuffer; + }; + + static_assert( sizeof( CommandBuffer ) == sizeof( VkCommandBuffer ), "handle and wrapper have different size!" 
); + + template + VULKAN_HPP_INLINE Result CommandBuffer::begin( const CommandBufferBeginInfo* pBeginInfo, Dispatch const &d) const + { + return static_cast( d.vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast( pBeginInfo ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type CommandBuffer::begin( const CommandBufferBeginInfo & beginInfo, Dispatch const &d ) const + { + Result result = static_cast( d.vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast( &beginInfo ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::CommandBuffer::begin" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE Result CommandBuffer::end(Dispatch const &d) const + { + return static_cast( d.vkEndCommandBuffer( m_commandBuffer ) ); + } +#else + template + VULKAN_HPP_INLINE ResultValueType::type CommandBuffer::end(Dispatch const &d ) const + { + Result result = static_cast( d.vkEndCommandBuffer( m_commandBuffer ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::CommandBuffer::end" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE Result CommandBuffer::reset( CommandBufferResetFlags flags, Dispatch const &d) const + { + return static_cast( d.vkResetCommandBuffer( m_commandBuffer, static_cast( flags ) ) ); + } +#else + template + VULKAN_HPP_INLINE ResultValueType::type CommandBuffer::reset( CommandBufferResetFlags flags, Dispatch const &d ) const + { + Result result = static_cast( d.vkResetCommandBuffer( m_commandBuffer, static_cast( flags ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::CommandBuffer::reset" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::bindPipeline( PipelineBindPoint pipelineBindPoint, Pipeline pipeline, Dispatch const &d) const + { + d.vkCmdBindPipeline( m_commandBuffer, static_cast( pipelineBindPoint ), static_cast( pipeline ) ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::bindPipeline( PipelineBindPoint pipelineBindPoint, Pipeline pipeline, Dispatch const &d ) const + { + d.vkCmdBindPipeline( m_commandBuffer, static_cast( pipelineBindPoint ), static_cast( pipeline ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::setViewport( uint32_t firstViewport, uint32_t viewportCount, const Viewport* pViewports, Dispatch const &d) const + { + d.vkCmdSetViewport( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast( pViewports ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::setViewport( uint32_t firstViewport, ArrayProxy viewports, Dispatch const &d ) const + { + d.vkCmdSetViewport( m_commandBuffer, firstViewport, viewports.size() , reinterpret_cast( viewports.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::setScissor( uint32_t firstScissor, uint32_t scissorCount, const Rect2D* pScissors, Dispatch const &d) const + { + d.vkCmdSetScissor( m_commandBuffer, firstScissor, scissorCount, reinterpret_cast( pScissors ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::setScissor( uint32_t firstScissor, ArrayProxy scissors, Dispatch const &d ) const + { + d.vkCmdSetScissor( m_commandBuffer, 
firstScissor, scissors.size() , reinterpret_cast( scissors.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::setLineWidth( float lineWidth, Dispatch const &d) const + { + d.vkCmdSetLineWidth( m_commandBuffer, lineWidth ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::setLineWidth( float lineWidth, Dispatch const &d ) const + { + d.vkCmdSetLineWidth( m_commandBuffer, lineWidth ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor, Dispatch const &d) const + { + d.vkCmdSetDepthBias( m_commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor, Dispatch const &d ) const + { + d.vkCmdSetDepthBias( m_commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::setBlendConstants( const float blendConstants[4], Dispatch const &d) const + { + d.vkCmdSetBlendConstants( m_commandBuffer, blendConstants ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::setBlendConstants( const float blendConstants[4], Dispatch const &d ) const + { + d.vkCmdSetBlendConstants( m_commandBuffer, blendConstants ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::setDepthBounds( float minDepthBounds, float maxDepthBounds, Dispatch const &d) const + { + d.vkCmdSetDepthBounds( m_commandBuffer, minDepthBounds, maxDepthBounds ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::setDepthBounds( float minDepthBounds, float maxDepthBounds, Dispatch const &d ) const + { + d.vkCmdSetDepthBounds( m_commandBuffer, minDepthBounds, maxDepthBounds ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::setStencilCompareMask( StencilFaceFlags faceMask, uint32_t compareMask, Dispatch const &d) const + { + d.vkCmdSetStencilCompareMask( m_commandBuffer, static_cast( faceMask ), compareMask ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::setStencilCompareMask( StencilFaceFlags faceMask, uint32_t compareMask, Dispatch const &d ) const + { + d.vkCmdSetStencilCompareMask( m_commandBuffer, static_cast( faceMask ), compareMask ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::setStencilWriteMask( StencilFaceFlags faceMask, uint32_t writeMask, Dispatch const &d) const + { + d.vkCmdSetStencilWriteMask( m_commandBuffer, static_cast( faceMask ), writeMask ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::setStencilWriteMask( StencilFaceFlags faceMask, uint32_t writeMask, Dispatch const &d ) const + { + d.vkCmdSetStencilWriteMask( m_commandBuffer, static_cast( faceMask ), writeMask ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::setStencilReference( StencilFaceFlags faceMask, uint32_t 
reference, Dispatch const &d) const + { + d.vkCmdSetStencilReference( m_commandBuffer, static_cast( faceMask ), reference ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::setStencilReference( StencilFaceFlags faceMask, uint32_t reference, Dispatch const &d ) const + { + d.vkCmdSetStencilReference( m_commandBuffer, static_cast( faceMask ), reference ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets( PipelineBindPoint pipelineBindPoint, PipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const DescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets, Dispatch const &d) const + { + d.vkCmdBindDescriptorSets( m_commandBuffer, static_cast( pipelineBindPoint ), static_cast( layout ), firstSet, descriptorSetCount, reinterpret_cast( pDescriptorSets ), dynamicOffsetCount, pDynamicOffsets ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets( PipelineBindPoint pipelineBindPoint, PipelineLayout layout, uint32_t firstSet, ArrayProxy descriptorSets, ArrayProxy dynamicOffsets, Dispatch const &d ) const + { + d.vkCmdBindDescriptorSets( m_commandBuffer, static_cast( pipelineBindPoint ), static_cast( layout ), firstSet, descriptorSets.size() , reinterpret_cast( descriptorSets.data() ), dynamicOffsets.size() , dynamicOffsets.data() ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::bindIndexBuffer( Buffer buffer, DeviceSize offset, IndexType indexType, Dispatch const &d) const + { + d.vkCmdBindIndexBuffer( m_commandBuffer, static_cast( buffer ), offset, static_cast( indexType ) ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::bindIndexBuffer( Buffer buffer, DeviceSize offset, IndexType indexType, Dispatch const &d ) const + { + d.vkCmdBindIndexBuffer( m_commandBuffer, static_cast( buffer ), offset, static_cast( indexType ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers( uint32_t firstBinding, uint32_t bindingCount, const Buffer* pBuffers, const DeviceSize* pOffsets, Dispatch const &d) const + { + d.vkCmdBindVertexBuffers( m_commandBuffer, firstBinding, bindingCount, reinterpret_cast( pBuffers ), pOffsets ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers( uint32_t firstBinding, ArrayProxy buffers, ArrayProxy offsets, Dispatch const &d ) const + { +#ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( buffers.size() == offsets.size() ); +#else + if ( buffers.size() != offsets.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers: buffers.size() != offsets.size()" ); + } +#endif // VULKAN_HPP_NO_EXCEPTIONS + d.vkCmdBindVertexBuffers( m_commandBuffer, firstBinding, buffers.size() , reinterpret_cast( buffers.data() ), offsets.data() ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::draw( uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance, Dispatch const &d) const + { + d.vkCmdDraw( m_commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::draw( uint32_t vertexCount, uint32_t instanceCount, uint32_t 
firstVertex, uint32_t firstInstance, Dispatch const &d ) const + { + d.vkCmdDraw( m_commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::drawIndexed( uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance, Dispatch const &d) const + { + d.vkCmdDrawIndexed( m_commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::drawIndexed( uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance, Dispatch const &d ) const + { + d.vkCmdDrawIndexed( m_commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::drawIndirect( Buffer buffer, DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d) const + { + d.vkCmdDrawIndirect( m_commandBuffer, static_cast( buffer ), offset, drawCount, stride ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::drawIndirect( Buffer buffer, DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d ) const + { + d.vkCmdDrawIndirect( m_commandBuffer, static_cast( buffer ), offset, drawCount, stride ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirect( Buffer buffer, DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d) const + { + d.vkCmdDrawIndexedIndirect( m_commandBuffer, static_cast( buffer ), offset, drawCount, stride ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirect( Buffer buffer, DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d ) const + { + d.vkCmdDrawIndexedIndirect( m_commandBuffer, static_cast( buffer ), offset, drawCount, stride ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::dispatch( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d) const + { + d.vkCmdDispatch( m_commandBuffer, groupCountX, groupCountY, groupCountZ ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::dispatch( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d ) const + { + d.vkCmdDispatch( m_commandBuffer, groupCountX, groupCountY, groupCountZ ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::dispatchIndirect( Buffer buffer, DeviceSize offset, Dispatch const &d) const + { + d.vkCmdDispatchIndirect( m_commandBuffer, static_cast( buffer ), offset ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::dispatchIndirect( Buffer buffer, DeviceSize offset, Dispatch const &d ) const + { + d.vkCmdDispatchIndirect( m_commandBuffer, static_cast( buffer ), offset ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::copyBuffer( Buffer srcBuffer, Buffer dstBuffer, uint32_t regionCount, const BufferCopy* pRegions, Dispatch const &d) const + { + d.vkCmdCopyBuffer( m_commandBuffer, static_cast( srcBuffer ), static_cast( 
dstBuffer ), regionCount, reinterpret_cast( pRegions ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::copyBuffer( Buffer srcBuffer, Buffer dstBuffer, ArrayProxy regions, Dispatch const &d ) const + { + d.vkCmdCopyBuffer( m_commandBuffer, static_cast( srcBuffer ), static_cast( dstBuffer ), regions.size() , reinterpret_cast( regions.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::copyImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, uint32_t regionCount, const ImageCopy* pRegions, Dispatch const &d) const + { + d.vkCmdCopyImage( m_commandBuffer, static_cast( srcImage ), static_cast( srcImageLayout ), static_cast( dstImage ), static_cast( dstImageLayout ), regionCount, reinterpret_cast( pRegions ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::copyImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, ArrayProxy regions, Dispatch const &d ) const + { + d.vkCmdCopyImage( m_commandBuffer, static_cast( srcImage ), static_cast( srcImageLayout ), static_cast( dstImage ), static_cast( dstImageLayout ), regions.size() , reinterpret_cast( regions.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::blitImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, uint32_t regionCount, const ImageBlit* pRegions, Filter filter, Dispatch const &d) const + { + d.vkCmdBlitImage( m_commandBuffer, static_cast( srcImage ), static_cast( srcImageLayout ), static_cast( dstImage ), static_cast( dstImageLayout ), regionCount, reinterpret_cast( pRegions ), static_cast( filter ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::blitImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, ArrayProxy regions, Filter filter, Dispatch const &d ) const + { + d.vkCmdBlitImage( m_commandBuffer, static_cast( srcImage ), static_cast( srcImageLayout ), static_cast( dstImage ), static_cast( dstImageLayout ), regions.size() , reinterpret_cast( regions.data() ), static_cast( filter ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage( Buffer srcBuffer, Image dstImage, ImageLayout dstImageLayout, uint32_t regionCount, const BufferImageCopy* pRegions, Dispatch const &d) const + { + d.vkCmdCopyBufferToImage( m_commandBuffer, static_cast( srcBuffer ), static_cast( dstImage ), static_cast( dstImageLayout ), regionCount, reinterpret_cast( pRegions ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage( Buffer srcBuffer, Image dstImage, ImageLayout dstImageLayout, ArrayProxy regions, Dispatch const &d ) const + { + d.vkCmdCopyBufferToImage( m_commandBuffer, static_cast( srcBuffer ), static_cast( dstImage ), static_cast( dstImageLayout ), regions.size() , reinterpret_cast( regions.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer( Image srcImage, ImageLayout srcImageLayout, Buffer dstBuffer, uint32_t regionCount, const BufferImageCopy* pRegions, Dispatch const &d) const + { + d.vkCmdCopyImageToBuffer( m_commandBuffer, static_cast( srcImage ), static_cast( srcImageLayout ), static_cast( 
dstBuffer ), regionCount, reinterpret_cast( pRegions ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer( Image srcImage, ImageLayout srcImageLayout, Buffer dstBuffer, ArrayProxy regions, Dispatch const &d ) const + { + d.vkCmdCopyImageToBuffer( m_commandBuffer, static_cast( srcImage ), static_cast( srcImageLayout ), static_cast( dstBuffer ), regions.size() , reinterpret_cast( regions.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::updateBuffer( Buffer dstBuffer, DeviceSize dstOffset, DeviceSize dataSize, const void* pData, Dispatch const &d) const + { + d.vkCmdUpdateBuffer( m_commandBuffer, static_cast( dstBuffer ), dstOffset, dataSize, pData ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::updateBuffer( Buffer dstBuffer, DeviceSize dstOffset, ArrayProxy data, Dispatch const &d ) const + { + d.vkCmdUpdateBuffer( m_commandBuffer, static_cast( dstBuffer ), dstOffset, data.size() * sizeof( T ) , reinterpret_cast( data.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::fillBuffer( Buffer dstBuffer, DeviceSize dstOffset, DeviceSize size, uint32_t data, Dispatch const &d) const + { + d.vkCmdFillBuffer( m_commandBuffer, static_cast( dstBuffer ), dstOffset, size, data ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::fillBuffer( Buffer dstBuffer, DeviceSize dstOffset, DeviceSize size, uint32_t data, Dispatch const &d ) const + { + d.vkCmdFillBuffer( m_commandBuffer, static_cast( dstBuffer ), dstOffset, size, data ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::clearColorImage( Image image, ImageLayout imageLayout, const ClearColorValue* pColor, uint32_t rangeCount, const ImageSubresourceRange* pRanges, Dispatch const &d) const + { + d.vkCmdClearColorImage( m_commandBuffer, static_cast( image ), static_cast( imageLayout ), reinterpret_cast( pColor ), rangeCount, reinterpret_cast( pRanges ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::clearColorImage( Image image, ImageLayout imageLayout, const ClearColorValue & color, ArrayProxy ranges, Dispatch const &d ) const + { + d.vkCmdClearColorImage( m_commandBuffer, static_cast( image ), static_cast( imageLayout ), reinterpret_cast( &color ), ranges.size() , reinterpret_cast( ranges.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::clearDepthStencilImage( Image image, ImageLayout imageLayout, const ClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const ImageSubresourceRange* pRanges, Dispatch const &d) const + { + d.vkCmdClearDepthStencilImage( m_commandBuffer, static_cast( image ), static_cast( imageLayout ), reinterpret_cast( pDepthStencil ), rangeCount, reinterpret_cast( pRanges ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::clearDepthStencilImage( Image image, ImageLayout imageLayout, const ClearDepthStencilValue & depthStencil, ArrayProxy ranges, Dispatch const &d ) const + { + d.vkCmdClearDepthStencilImage( m_commandBuffer, static_cast( image ), static_cast( imageLayout ), reinterpret_cast( &depthStencil ), ranges.size() , reinterpret_cast( ranges.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + 
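// Editorial note (not part of the upstream vulkan.hpp): the definitions in this block all
// follow the same pattern: cast the C++ wrapper types back to their Vk* handles, forward to
// the C entry point through the dispatcher, and, for the Result-returning commands (begin,
// end, reset), let createResultValue translate failure into an exception derived from
// vk::SystemError when exceptions are enabled. A minimal recording sketch under those
// defaults, assuming a hypothetical device, allocInfo, renderPassInfo and pipeline created
// elsewhere:
//
//   vk::CommandBuffer cmd = device.allocateCommandBuffers( allocInfo ).front();
//   cmd.begin( vk::CommandBufferBeginInfo( vk::CommandBufferUsageFlagBits::eOneTimeSubmit ) );
//   cmd.beginRenderPass( renderPassInfo, vk::SubpassContents::eInline );
//   cmd.bindPipeline( vk::PipelineBindPoint::eGraphics, pipeline );
//   cmd.draw( 3, 1, 0, 0 );          // vertexCount, instanceCount, firstVertex, firstInstance
//   cmd.endRenderPass();
//   cmd.end();                       // throws if vkEndCommandBuffer does not return VK_SUCCESS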
template + VULKAN_HPP_INLINE void CommandBuffer::clearAttachments( uint32_t attachmentCount, const ClearAttachment* pAttachments, uint32_t rectCount, const ClearRect* pRects, Dispatch const &d) const + { + d.vkCmdClearAttachments( m_commandBuffer, attachmentCount, reinterpret_cast( pAttachments ), rectCount, reinterpret_cast( pRects ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::clearAttachments( ArrayProxy attachments, ArrayProxy rects, Dispatch const &d ) const + { + d.vkCmdClearAttachments( m_commandBuffer, attachments.size() , reinterpret_cast( attachments.data() ), rects.size() , reinterpret_cast( rects.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::resolveImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, uint32_t regionCount, const ImageResolve* pRegions, Dispatch const &d) const + { + d.vkCmdResolveImage( m_commandBuffer, static_cast( srcImage ), static_cast( srcImageLayout ), static_cast( dstImage ), static_cast( dstImageLayout ), regionCount, reinterpret_cast( pRegions ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::resolveImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, ArrayProxy regions, Dispatch const &d ) const + { + d.vkCmdResolveImage( m_commandBuffer, static_cast( srcImage ), static_cast( srcImageLayout ), static_cast( dstImage ), static_cast( dstImageLayout ), regions.size() , reinterpret_cast( regions.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::setEvent( Event event, PipelineStageFlags stageMask, Dispatch const &d) const + { + d.vkCmdSetEvent( m_commandBuffer, static_cast( event ), static_cast( stageMask ) ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::setEvent( Event event, PipelineStageFlags stageMask, Dispatch const &d ) const + { + d.vkCmdSetEvent( m_commandBuffer, static_cast( event ), static_cast( stageMask ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::resetEvent( Event event, PipelineStageFlags stageMask, Dispatch const &d) const + { + d.vkCmdResetEvent( m_commandBuffer, static_cast( event ), static_cast( stageMask ) ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::resetEvent( Event event, PipelineStageFlags stageMask, Dispatch const &d ) const + { + d.vkCmdResetEvent( m_commandBuffer, static_cast( event ), static_cast( stageMask ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::waitEvents( uint32_t eventCount, const Event* pEvents, PipelineStageFlags srcStageMask, PipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const ImageMemoryBarrier* pImageMemoryBarriers, Dispatch const &d) const + { + d.vkCmdWaitEvents( m_commandBuffer, eventCount, reinterpret_cast( pEvents ), static_cast( srcStageMask ), static_cast( dstStageMask ), memoryBarrierCount, reinterpret_cast( pMemoryBarriers ), bufferMemoryBarrierCount, reinterpret_cast( pBufferMemoryBarriers ), imageMemoryBarrierCount, reinterpret_cast( pImageMemoryBarriers ) ); + } +#ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::waitEvents( ArrayProxy events, PipelineStageFlags srcStageMask, PipelineStageFlags dstStageMask, ArrayProxy memoryBarriers, ArrayProxy bufferMemoryBarriers, ArrayProxy imageMemoryBarriers, Dispatch const &d ) const + { + d.vkCmdWaitEvents( m_commandBuffer, events.size() , reinterpret_cast( events.data() ), static_cast( srcStageMask ), static_cast( dstStageMask ), memoryBarriers.size() , reinterpret_cast( memoryBarriers.data() ), bufferMemoryBarriers.size() , reinterpret_cast( bufferMemoryBarriers.data() ), imageMemoryBarriers.size() , reinterpret_cast( imageMemoryBarriers.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier( PipelineStageFlags srcStageMask, PipelineStageFlags dstStageMask, DependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const ImageMemoryBarrier* pImageMemoryBarriers, Dispatch const &d) const + { + d.vkCmdPipelineBarrier( m_commandBuffer, static_cast( srcStageMask ), static_cast( dstStageMask ), static_cast( dependencyFlags ), memoryBarrierCount, reinterpret_cast( pMemoryBarriers ), bufferMemoryBarrierCount, reinterpret_cast( pBufferMemoryBarriers ), imageMemoryBarrierCount, reinterpret_cast( pImageMemoryBarriers ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier( PipelineStageFlags srcStageMask, PipelineStageFlags dstStageMask, DependencyFlags dependencyFlags, ArrayProxy memoryBarriers, ArrayProxy bufferMemoryBarriers, ArrayProxy imageMemoryBarriers, Dispatch const &d ) const + { + d.vkCmdPipelineBarrier( m_commandBuffer, static_cast( srcStageMask ), static_cast( dstStageMask ), static_cast( dependencyFlags ), memoryBarriers.size() , reinterpret_cast( memoryBarriers.data() ), bufferMemoryBarriers.size() , reinterpret_cast( bufferMemoryBarriers.data() ), imageMemoryBarriers.size() , reinterpret_cast( imageMemoryBarriers.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::beginQuery( QueryPool queryPool, uint32_t query, QueryControlFlags flags, Dispatch const &d) const + { + d.vkCmdBeginQuery( m_commandBuffer, static_cast( queryPool ), query, static_cast( flags ) ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::beginQuery( QueryPool queryPool, uint32_t query, QueryControlFlags flags, Dispatch const &d ) const + { + d.vkCmdBeginQuery( m_commandBuffer, static_cast( queryPool ), query, static_cast( flags ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::endQuery( QueryPool queryPool, uint32_t query, Dispatch const &d) const + { + d.vkCmdEndQuery( m_commandBuffer, static_cast( queryPool ), query ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::endQuery( QueryPool queryPool, uint32_t query, Dispatch const &d ) const + { + d.vkCmdEndQuery( m_commandBuffer, static_cast( queryPool ), query ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::beginConditionalRenderingEXT( const ConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin, Dispatch const &d) const + { + d.vkCmdBeginConditionalRenderingEXT( 
m_commandBuffer, reinterpret_cast( pConditionalRenderingBegin ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::beginConditionalRenderingEXT( const ConditionalRenderingBeginInfoEXT & conditionalRenderingBegin, Dispatch const &d ) const + { + d.vkCmdBeginConditionalRenderingEXT( m_commandBuffer, reinterpret_cast( &conditionalRenderingBegin ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::endConditionalRenderingEXT(Dispatch const &d) const + { + d.vkCmdEndConditionalRenderingEXT( m_commandBuffer ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::endConditionalRenderingEXT(Dispatch const &d ) const + { + d.vkCmdEndConditionalRenderingEXT( m_commandBuffer ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::resetQueryPool( QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d) const + { + d.vkCmdResetQueryPool( m_commandBuffer, static_cast( queryPool ), firstQuery, queryCount ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::resetQueryPool( QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d ) const + { + d.vkCmdResetQueryPool( m_commandBuffer, static_cast( queryPool ), firstQuery, queryCount ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp( PipelineStageFlagBits pipelineStage, QueryPool queryPool, uint32_t query, Dispatch const &d) const + { + d.vkCmdWriteTimestamp( m_commandBuffer, static_cast( pipelineStage ), static_cast( queryPool ), query ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp( PipelineStageFlagBits pipelineStage, QueryPool queryPool, uint32_t query, Dispatch const &d ) const + { + d.vkCmdWriteTimestamp( m_commandBuffer, static_cast( pipelineStage ), static_cast( queryPool ), query ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::copyQueryPoolResults( QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Buffer dstBuffer, DeviceSize dstOffset, DeviceSize stride, QueryResultFlags flags, Dispatch const &d) const + { + d.vkCmdCopyQueryPoolResults( m_commandBuffer, static_cast( queryPool ), firstQuery, queryCount, static_cast( dstBuffer ), dstOffset, stride, static_cast( flags ) ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::copyQueryPoolResults( QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Buffer dstBuffer, DeviceSize dstOffset, DeviceSize stride, QueryResultFlags flags, Dispatch const &d ) const + { + d.vkCmdCopyQueryPoolResults( m_commandBuffer, static_cast( queryPool ), firstQuery, queryCount, static_cast( dstBuffer ), dstOffset, stride, static_cast( flags ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::pushConstants( PipelineLayout layout, ShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void* pValues, Dispatch const &d) const + { + d.vkCmdPushConstants( m_commandBuffer, static_cast( layout ), static_cast( stageFlags ), offset, size, pValues ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::pushConstants( PipelineLayout layout, ShaderStageFlags 
stageFlags, uint32_t offset, ArrayProxy values, Dispatch const &d ) const + { + d.vkCmdPushConstants( m_commandBuffer, static_cast( layout ), static_cast( stageFlags ), offset, values.size() * sizeof( T ) , reinterpret_cast( values.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass( const RenderPassBeginInfo* pRenderPassBegin, SubpassContents contents, Dispatch const &d) const + { + d.vkCmdBeginRenderPass( m_commandBuffer, reinterpret_cast( pRenderPassBegin ), static_cast( contents ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass( const RenderPassBeginInfo & renderPassBegin, SubpassContents contents, Dispatch const &d ) const + { + d.vkCmdBeginRenderPass( m_commandBuffer, reinterpret_cast( &renderPassBegin ), static_cast( contents ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::nextSubpass( SubpassContents contents, Dispatch const &d) const + { + d.vkCmdNextSubpass( m_commandBuffer, static_cast( contents ) ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::nextSubpass( SubpassContents contents, Dispatch const &d ) const + { + d.vkCmdNextSubpass( m_commandBuffer, static_cast( contents ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::endRenderPass(Dispatch const &d) const + { + d.vkCmdEndRenderPass( m_commandBuffer ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::endRenderPass(Dispatch const &d ) const + { + d.vkCmdEndRenderPass( m_commandBuffer ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::executeCommands( uint32_t commandBufferCount, const CommandBuffer* pCommandBuffers, Dispatch const &d) const + { + d.vkCmdExecuteCommands( m_commandBuffer, commandBufferCount, reinterpret_cast( pCommandBuffers ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::executeCommands( ArrayProxy commandBuffers, Dispatch const &d ) const + { + d.vkCmdExecuteCommands( m_commandBuffer, commandBuffers.size() , reinterpret_cast( commandBuffers.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::debugMarkerBeginEXT( const DebugMarkerMarkerInfoEXT* pMarkerInfo, Dispatch const &d) const + { + d.vkCmdDebugMarkerBeginEXT( m_commandBuffer, reinterpret_cast( pMarkerInfo ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::debugMarkerBeginEXT( const DebugMarkerMarkerInfoEXT & markerInfo, Dispatch const &d ) const + { + d.vkCmdDebugMarkerBeginEXT( m_commandBuffer, reinterpret_cast( &markerInfo ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::debugMarkerEndEXT(Dispatch const &d) const + { + d.vkCmdDebugMarkerEndEXT( m_commandBuffer ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::debugMarkerEndEXT(Dispatch const &d ) const + { + d.vkCmdDebugMarkerEndEXT( m_commandBuffer ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::debugMarkerInsertEXT( const DebugMarkerMarkerInfoEXT* pMarkerInfo, Dispatch const &d) const + { + d.vkCmdDebugMarkerInsertEXT( m_commandBuffer, 
reinterpret_cast( pMarkerInfo ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::debugMarkerInsertEXT( const DebugMarkerMarkerInfoEXT & markerInfo, Dispatch const &d ) const + { + d.vkCmdDebugMarkerInsertEXT( m_commandBuffer, reinterpret_cast( &markerInfo ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountAMD( Buffer buffer, DeviceSize offset, Buffer countBuffer, DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d) const + { + d.vkCmdDrawIndirectCountAMD( m_commandBuffer, static_cast( buffer ), offset, static_cast( countBuffer ), countBufferOffset, maxDrawCount, stride ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountAMD( Buffer buffer, DeviceSize offset, Buffer countBuffer, DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d ) const + { + d.vkCmdDrawIndirectCountAMD( m_commandBuffer, static_cast( buffer ), offset, static_cast( countBuffer ), countBufferOffset, maxDrawCount, stride ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountAMD( Buffer buffer, DeviceSize offset, Buffer countBuffer, DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d) const + { + d.vkCmdDrawIndexedIndirectCountAMD( m_commandBuffer, static_cast( buffer ), offset, static_cast( countBuffer ), countBufferOffset, maxDrawCount, stride ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountAMD( Buffer buffer, DeviceSize offset, Buffer countBuffer, DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d ) const + { + d.vkCmdDrawIndexedIndirectCountAMD( m_commandBuffer, static_cast( buffer ), offset, static_cast( countBuffer ), countBufferOffset, maxDrawCount, stride ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::processCommandsNVX( const CmdProcessCommandsInfoNVX* pProcessCommandsInfo, Dispatch const &d) const + { + d.vkCmdProcessCommandsNVX( m_commandBuffer, reinterpret_cast( pProcessCommandsInfo ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::processCommandsNVX( const CmdProcessCommandsInfoNVX & processCommandsInfo, Dispatch const &d ) const + { + d.vkCmdProcessCommandsNVX( m_commandBuffer, reinterpret_cast( &processCommandsInfo ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::reserveSpaceForCommandsNVX( const CmdReserveSpaceForCommandsInfoNVX* pReserveSpaceInfo, Dispatch const &d) const + { + d.vkCmdReserveSpaceForCommandsNVX( m_commandBuffer, reinterpret_cast( pReserveSpaceInfo ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::reserveSpaceForCommandsNVX( const CmdReserveSpaceForCommandsInfoNVX & reserveSpaceInfo, Dispatch const &d ) const + { + d.vkCmdReserveSpaceForCommandsNVX( m_commandBuffer, reinterpret_cast( &reserveSpaceInfo ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetKHR( PipelineBindPoint pipelineBindPoint, PipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const WriteDescriptorSet* pDescriptorWrites, Dispatch const 
&d) const + { + d.vkCmdPushDescriptorSetKHR( m_commandBuffer, static_cast( pipelineBindPoint ), static_cast( layout ), set, descriptorWriteCount, reinterpret_cast( pDescriptorWrites ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetKHR( PipelineBindPoint pipelineBindPoint, PipelineLayout layout, uint32_t set, ArrayProxy descriptorWrites, Dispatch const &d ) const + { + d.vkCmdPushDescriptorSetKHR( m_commandBuffer, static_cast( pipelineBindPoint ), static_cast( layout ), set, descriptorWrites.size() , reinterpret_cast( descriptorWrites.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::setDeviceMask( uint32_t deviceMask, Dispatch const &d) const + { + d.vkCmdSetDeviceMask( m_commandBuffer, deviceMask ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::setDeviceMask( uint32_t deviceMask, Dispatch const &d ) const + { + d.vkCmdSetDeviceMask( m_commandBuffer, deviceMask ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::setDeviceMaskKHR( uint32_t deviceMask, Dispatch const &d) const + { + d.vkCmdSetDeviceMaskKHR( m_commandBuffer, deviceMask ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::setDeviceMaskKHR( uint32_t deviceMask, Dispatch const &d ) const + { + d.vkCmdSetDeviceMaskKHR( m_commandBuffer, deviceMask ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::dispatchBase( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d) const + { + d.vkCmdDispatchBase( m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::dispatchBase( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d ) const + { + d.vkCmdDispatchBase( m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::dispatchBaseKHR( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d) const + { + d.vkCmdDispatchBaseKHR( m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::dispatchBaseKHR( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d ) const + { + d.vkCmdDispatchBaseKHR( m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplateKHR( DescriptorUpdateTemplate descriptorUpdateTemplate, PipelineLayout layout, uint32_t set, const void* pData, Dispatch const &d) const + { + d.vkCmdPushDescriptorSetWithTemplateKHR( m_commandBuffer, static_cast( descriptorUpdateTemplate ), static_cast( layout ), set, pData 
); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplateKHR( DescriptorUpdateTemplate descriptorUpdateTemplate, PipelineLayout layout, uint32_t set, const void* pData, Dispatch const &d ) const + { + d.vkCmdPushDescriptorSetWithTemplateKHR( m_commandBuffer, static_cast( descriptorUpdateTemplate ), static_cast( layout ), set, pData ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::setViewportWScalingNV( uint32_t firstViewport, uint32_t viewportCount, const ViewportWScalingNV* pViewportWScalings, Dispatch const &d) const + { + d.vkCmdSetViewportWScalingNV( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast( pViewportWScalings ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::setViewportWScalingNV( uint32_t firstViewport, ArrayProxy viewportWScalings, Dispatch const &d ) const + { + d.vkCmdSetViewportWScalingNV( m_commandBuffer, firstViewport, viewportWScalings.size() , reinterpret_cast( viewportWScalings.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEXT( uint32_t firstDiscardRectangle, uint32_t discardRectangleCount, const Rect2D* pDiscardRectangles, Dispatch const &d) const + { + d.vkCmdSetDiscardRectangleEXT( m_commandBuffer, firstDiscardRectangle, discardRectangleCount, reinterpret_cast( pDiscardRectangles ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEXT( uint32_t firstDiscardRectangle, ArrayProxy discardRectangles, Dispatch const &d ) const + { + d.vkCmdSetDiscardRectangleEXT( m_commandBuffer, firstDiscardRectangle, discardRectangles.size() , reinterpret_cast( discardRectangles.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::setSampleLocationsEXT( const SampleLocationsInfoEXT* pSampleLocationsInfo, Dispatch const &d) const + { + d.vkCmdSetSampleLocationsEXT( m_commandBuffer, reinterpret_cast( pSampleLocationsInfo ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::setSampleLocationsEXT( const SampleLocationsInfoEXT & sampleLocationsInfo, Dispatch const &d ) const + { + d.vkCmdSetSampleLocationsEXT( m_commandBuffer, reinterpret_cast( &sampleLocationsInfo ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d) const + { + d.vkCmdBeginDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast( pLabelInfo ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d ) const + { + d.vkCmdBeginDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast( &labelInfo ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::endDebugUtilsLabelEXT(Dispatch const &d) const + { + d.vkCmdEndDebugUtilsLabelEXT( m_commandBuffer ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::endDebugUtilsLabelEXT(Dispatch const &d ) const + { + d.vkCmdEndDebugUtilsLabelEXT( m_commandBuffer ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT* 
pLabelInfo, Dispatch const &d) const + { + d.vkCmdInsertDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast( pLabelInfo ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d ) const + { + d.vkCmdInsertDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast( &labelInfo ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarkerAMD( PipelineStageFlagBits pipelineStage, Buffer dstBuffer, DeviceSize dstOffset, uint32_t marker, Dispatch const &d) const + { + d.vkCmdWriteBufferMarkerAMD( m_commandBuffer, static_cast( pipelineStage ), static_cast( dstBuffer ), dstOffset, marker ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarkerAMD( PipelineStageFlagBits pipelineStage, Buffer dstBuffer, DeviceSize dstOffset, uint32_t marker, Dispatch const &d ) const + { + d.vkCmdWriteBufferMarkerAMD( m_commandBuffer, static_cast( pipelineStage ), static_cast( dstBuffer ), dstOffset, marker ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2KHR( const RenderPassBeginInfo* pRenderPassBegin, const SubpassBeginInfoKHR* pSubpassBeginInfo, Dispatch const &d) const + { + d.vkCmdBeginRenderPass2KHR( m_commandBuffer, reinterpret_cast( pRenderPassBegin ), reinterpret_cast( pSubpassBeginInfo ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2KHR( const RenderPassBeginInfo & renderPassBegin, const SubpassBeginInfoKHR & subpassBeginInfo, Dispatch const &d ) const + { + d.vkCmdBeginRenderPass2KHR( m_commandBuffer, reinterpret_cast( &renderPassBegin ), reinterpret_cast( &subpassBeginInfo ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2KHR( const SubpassBeginInfoKHR* pSubpassBeginInfo, const SubpassEndInfoKHR* pSubpassEndInfo, Dispatch const &d) const + { + d.vkCmdNextSubpass2KHR( m_commandBuffer, reinterpret_cast( pSubpassBeginInfo ), reinterpret_cast( pSubpassEndInfo ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2KHR( const SubpassBeginInfoKHR & subpassBeginInfo, const SubpassEndInfoKHR & subpassEndInfo, Dispatch const &d ) const + { + d.vkCmdNextSubpass2KHR( m_commandBuffer, reinterpret_cast( &subpassBeginInfo ), reinterpret_cast( &subpassEndInfo ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2KHR( const SubpassEndInfoKHR* pSubpassEndInfo, Dispatch const &d) const + { + d.vkCmdEndRenderPass2KHR( m_commandBuffer, reinterpret_cast( pSubpassEndInfo ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2KHR( const SubpassEndInfoKHR & subpassEndInfo, Dispatch const &d ) const + { + d.vkCmdEndRenderPass2KHR( m_commandBuffer, reinterpret_cast( &subpassEndInfo ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountKHR( Buffer buffer, DeviceSize offset, Buffer countBuffer, DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d) const + { + d.vkCmdDrawIndirectCountKHR( m_commandBuffer, static_cast( buffer ), offset, static_cast( 
countBuffer ), countBufferOffset, maxDrawCount, stride ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountKHR( Buffer buffer, DeviceSize offset, Buffer countBuffer, DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d ) const + { + d.vkCmdDrawIndirectCountKHR( m_commandBuffer, static_cast( buffer ), offset, static_cast( countBuffer ), countBufferOffset, maxDrawCount, stride ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountKHR( Buffer buffer, DeviceSize offset, Buffer countBuffer, DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d) const + { + d.vkCmdDrawIndexedIndirectCountKHR( m_commandBuffer, static_cast( buffer ), offset, static_cast( countBuffer ), countBufferOffset, maxDrawCount, stride ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountKHR( Buffer buffer, DeviceSize offset, Buffer countBuffer, DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d ) const + { + d.vkCmdDrawIndexedIndirectCountKHR( m_commandBuffer, static_cast( buffer ), offset, static_cast( countBuffer ), countBufferOffset, maxDrawCount, stride ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::setCheckpointNV( const void* pCheckpointMarker, Dispatch const &d) const + { + d.vkCmdSetCheckpointNV( m_commandBuffer, pCheckpointMarker ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::setCheckpointNV( const void* pCheckpointMarker, Dispatch const &d ) const + { + d.vkCmdSetCheckpointNV( m_commandBuffer, pCheckpointMarker ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::bindTransformFeedbackBuffersEXT( uint32_t firstBinding, uint32_t bindingCount, const Buffer* pBuffers, const DeviceSize* pOffsets, const DeviceSize* pSizes, Dispatch const &d) const + { + d.vkCmdBindTransformFeedbackBuffersEXT( m_commandBuffer, firstBinding, bindingCount, reinterpret_cast( pBuffers ), pOffsets, pSizes ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::bindTransformFeedbackBuffersEXT( uint32_t firstBinding, ArrayProxy buffers, ArrayProxy offsets, ArrayProxy sizes, Dispatch const &d ) const + { +#ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( buffers.size() == offsets.size() ); +#else + if ( buffers.size() != offsets.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindTransformFeedbackBuffersEXT: buffers.size() != offsets.size()" ); + } +#endif // VULKAN_HPP_NO_EXCEPTIONS +#ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( buffers.size() == sizes.size() ); +#else + if ( buffers.size() != sizes.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindTransformFeedbackBuffersEXT: buffers.size() != sizes.size()" ); + } +#endif // VULKAN_HPP_NO_EXCEPTIONS +#ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( offsets.size() == sizes.size() ); +#else + if ( offsets.size() != sizes.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindTransformFeedbackBuffersEXT: offsets.size() != sizes.size()" ); + } +#endif // VULKAN_HPP_NO_EXCEPTIONS + d.vkCmdBindTransformFeedbackBuffersEXT( m_commandBuffer, firstBinding, buffers.size() , reinterpret_cast( buffers.data() ), 
offsets.data(), sizes.data() ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::beginTransformFeedbackEXT( uint32_t firstCounterBuffer, uint32_t counterBufferCount, const Buffer* pCounterBuffers, const DeviceSize* pCounterBufferOffsets, Dispatch const &d) const + { + d.vkCmdBeginTransformFeedbackEXT( m_commandBuffer, firstCounterBuffer, counterBufferCount, reinterpret_cast( pCounterBuffers ), pCounterBufferOffsets ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::beginTransformFeedbackEXT( uint32_t firstCounterBuffer, ArrayProxy counterBuffers, ArrayProxy counterBufferOffsets, Dispatch const &d ) const + { +#ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( counterBuffers.size() == counterBufferOffsets.size() ); +#else + if ( counterBuffers.size() != counterBufferOffsets.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::beginTransformFeedbackEXT: counterBuffers.size() != counterBufferOffsets.size()" ); + } +#endif // VULKAN_HPP_NO_EXCEPTIONS + d.vkCmdBeginTransformFeedbackEXT( m_commandBuffer, firstCounterBuffer, counterBuffers.size() , reinterpret_cast( counterBuffers.data() ), counterBufferOffsets.data() ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::endTransformFeedbackEXT( uint32_t firstCounterBuffer, uint32_t counterBufferCount, const Buffer* pCounterBuffers, const DeviceSize* pCounterBufferOffsets, Dispatch const &d) const + { + d.vkCmdEndTransformFeedbackEXT( m_commandBuffer, firstCounterBuffer, counterBufferCount, reinterpret_cast( pCounterBuffers ), pCounterBufferOffsets ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::endTransformFeedbackEXT( uint32_t firstCounterBuffer, ArrayProxy counterBuffers, ArrayProxy counterBufferOffsets, Dispatch const &d ) const + { +#ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( counterBuffers.size() == counterBufferOffsets.size() ); +#else + if ( counterBuffers.size() != counterBufferOffsets.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::endTransformFeedbackEXT: counterBuffers.size() != counterBufferOffsets.size()" ); + } +#endif // VULKAN_HPP_NO_EXCEPTIONS + d.vkCmdEndTransformFeedbackEXT( m_commandBuffer, firstCounterBuffer, counterBuffers.size() , reinterpret_cast( counterBuffers.data() ), counterBufferOffsets.data() ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::beginQueryIndexedEXT( QueryPool queryPool, uint32_t query, QueryControlFlags flags, uint32_t index, Dispatch const &d) const + { + d.vkCmdBeginQueryIndexedEXT( m_commandBuffer, static_cast( queryPool ), query, static_cast( flags ), index ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::beginQueryIndexedEXT( QueryPool queryPool, uint32_t query, QueryControlFlags flags, uint32_t index, Dispatch const &d ) const + { + d.vkCmdBeginQueryIndexedEXT( m_commandBuffer, static_cast( queryPool ), query, static_cast( flags ), index ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::endQueryIndexedEXT( QueryPool queryPool, uint32_t query, uint32_t index, Dispatch const &d) const + { + d.vkCmdEndQueryIndexedEXT( m_commandBuffer, static_cast( queryPool ), query, index ); + } +#else + template + VULKAN_HPP_INLINE void 
CommandBuffer::endQueryIndexedEXT( QueryPool queryPool, uint32_t query, uint32_t index, Dispatch const &d ) const + { + d.vkCmdEndQueryIndexedEXT( m_commandBuffer, static_cast( queryPool ), query, index ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::drawIndirectByteCountEXT( uint32_t instanceCount, uint32_t firstInstance, Buffer counterBuffer, DeviceSize counterBufferOffset, uint32_t counterOffset, uint32_t vertexStride, Dispatch const &d) const + { + d.vkCmdDrawIndirectByteCountEXT( m_commandBuffer, instanceCount, firstInstance, static_cast( counterBuffer ), counterBufferOffset, counterOffset, vertexStride ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::drawIndirectByteCountEXT( uint32_t instanceCount, uint32_t firstInstance, Buffer counterBuffer, DeviceSize counterBufferOffset, uint32_t counterOffset, uint32_t vertexStride, Dispatch const &d ) const + { + d.vkCmdDrawIndirectByteCountEXT( m_commandBuffer, instanceCount, firstInstance, static_cast( counterBuffer ), counterBufferOffset, counterOffset, vertexStride ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::setExclusiveScissorNV( uint32_t firstExclusiveScissor, uint32_t exclusiveScissorCount, const Rect2D* pExclusiveScissors, Dispatch const &d) const + { + d.vkCmdSetExclusiveScissorNV( m_commandBuffer, firstExclusiveScissor, exclusiveScissorCount, reinterpret_cast( pExclusiveScissors ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::setExclusiveScissorNV( uint32_t firstExclusiveScissor, ArrayProxy exclusiveScissors, Dispatch const &d ) const + { + d.vkCmdSetExclusiveScissorNV( m_commandBuffer, firstExclusiveScissor, exclusiveScissors.size() , reinterpret_cast( exclusiveScissors.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::bindShadingRateImageNV( ImageView imageView, ImageLayout imageLayout, Dispatch const &d) const + { + d.vkCmdBindShadingRateImageNV( m_commandBuffer, static_cast( imageView ), static_cast( imageLayout ) ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::bindShadingRateImageNV( ImageView imageView, ImageLayout imageLayout, Dispatch const &d ) const + { + d.vkCmdBindShadingRateImageNV( m_commandBuffer, static_cast( imageView ), static_cast( imageLayout ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::setViewportShadingRatePaletteNV( uint32_t firstViewport, uint32_t viewportCount, const ShadingRatePaletteNV* pShadingRatePalettes, Dispatch const &d) const + { + d.vkCmdSetViewportShadingRatePaletteNV( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast( pShadingRatePalettes ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::setViewportShadingRatePaletteNV( uint32_t firstViewport, ArrayProxy shadingRatePalettes, Dispatch const &d ) const + { + d.vkCmdSetViewportShadingRatePaletteNV( m_commandBuffer, firstViewport, shadingRatePalettes.size() , reinterpret_cast( shadingRatePalettes.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::setCoarseSampleOrderNV( CoarseSampleOrderTypeNV sampleOrderType, uint32_t customSampleOrderCount, const CoarseSampleOrderCustomNV* pCustomSampleOrders, Dispatch const &d) const 
+ { + d.vkCmdSetCoarseSampleOrderNV( m_commandBuffer, static_cast( sampleOrderType ), customSampleOrderCount, reinterpret_cast( pCustomSampleOrders ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::setCoarseSampleOrderNV( CoarseSampleOrderTypeNV sampleOrderType, ArrayProxy customSampleOrders, Dispatch const &d ) const + { + d.vkCmdSetCoarseSampleOrderNV( m_commandBuffer, static_cast( sampleOrderType ), customSampleOrders.size() , reinterpret_cast( customSampleOrders.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksNV( uint32_t taskCount, uint32_t firstTask, Dispatch const &d) const + { + d.vkCmdDrawMeshTasksNV( m_commandBuffer, taskCount, firstTask ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksNV( uint32_t taskCount, uint32_t firstTask, Dispatch const &d ) const + { + d.vkCmdDrawMeshTasksNV( m_commandBuffer, taskCount, firstTask ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectNV( Buffer buffer, DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d) const + { + d.vkCmdDrawMeshTasksIndirectNV( m_commandBuffer, static_cast( buffer ), offset, drawCount, stride ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectNV( Buffer buffer, DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d ) const + { + d.vkCmdDrawMeshTasksIndirectNV( m_commandBuffer, static_cast( buffer ), offset, drawCount, stride ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectCountNV( Buffer buffer, DeviceSize offset, Buffer countBuffer, DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d) const + { + d.vkCmdDrawMeshTasksIndirectCountNV( m_commandBuffer, static_cast( buffer ), offset, static_cast( countBuffer ), countBufferOffset, maxDrawCount, stride ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectCountNV( Buffer buffer, DeviceSize offset, Buffer countBuffer, DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d ) const + { + d.vkCmdDrawMeshTasksIndirectCountNV( m_commandBuffer, static_cast( buffer ), offset, static_cast( countBuffer ), countBufferOffset, maxDrawCount, stride ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureNV( AccelerationStructureNV dst, AccelerationStructureNV src, CopyAccelerationStructureModeNV mode, Dispatch const &d) const + { + d.vkCmdCopyAccelerationStructureNV( m_commandBuffer, static_cast( dst ), static_cast( src ), static_cast( mode ) ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureNV( AccelerationStructureNV dst, AccelerationStructureNV src, CopyAccelerationStructureModeNV mode, Dispatch const &d ) const + { + d.vkCmdCopyAccelerationStructureNV( m_commandBuffer, static_cast( dst ), static_cast( src ), static_cast( mode ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesNV( uint32_t accelerationStructureCount, const 
AccelerationStructureNV* pAccelerationStructures, QueryType queryType, QueryPool queryPool, uint32_t firstQuery, Dispatch const &d) const + { + d.vkCmdWriteAccelerationStructuresPropertiesNV( m_commandBuffer, accelerationStructureCount, reinterpret_cast( pAccelerationStructures ), static_cast( queryType ), static_cast( queryPool ), firstQuery ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesNV( ArrayProxy accelerationStructures, QueryType queryType, QueryPool queryPool, uint32_t firstQuery, Dispatch const &d ) const + { + d.vkCmdWriteAccelerationStructuresPropertiesNV( m_commandBuffer, accelerationStructures.size() , reinterpret_cast( accelerationStructures.data() ), static_cast( queryType ), static_cast( queryPool ), firstQuery ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureNV( const AccelerationStructureInfoNV* pInfo, Buffer instanceData, DeviceSize instanceOffset, Bool32 update, AccelerationStructureNV dst, AccelerationStructureNV src, Buffer scratch, DeviceSize scratchOffset, Dispatch const &d) const + { + d.vkCmdBuildAccelerationStructureNV( m_commandBuffer, reinterpret_cast( pInfo ), static_cast( instanceData ), instanceOffset, update, static_cast( dst ), static_cast( src ), static_cast( scratch ), scratchOffset ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureNV( const AccelerationStructureInfoNV & info, Buffer instanceData, DeviceSize instanceOffset, Bool32 update, AccelerationStructureNV dst, AccelerationStructureNV src, Buffer scratch, DeviceSize scratchOffset, Dispatch const &d ) const + { + d.vkCmdBuildAccelerationStructureNV( m_commandBuffer, reinterpret_cast( &info ), static_cast( instanceData ), instanceOffset, update, static_cast( dst ), static_cast( src ), static_cast( scratch ), scratchOffset ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::traceRaysNV( Buffer raygenShaderBindingTableBuffer, DeviceSize raygenShaderBindingOffset, Buffer missShaderBindingTableBuffer, DeviceSize missShaderBindingOffset, DeviceSize missShaderBindingStride, Buffer hitShaderBindingTableBuffer, DeviceSize hitShaderBindingOffset, DeviceSize hitShaderBindingStride, Buffer callableShaderBindingTableBuffer, DeviceSize callableShaderBindingOffset, DeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth, Dispatch const &d) const + { + d.vkCmdTraceRaysNV( m_commandBuffer, static_cast( raygenShaderBindingTableBuffer ), raygenShaderBindingOffset, static_cast( missShaderBindingTableBuffer ), missShaderBindingOffset, missShaderBindingStride, static_cast( hitShaderBindingTableBuffer ), hitShaderBindingOffset, hitShaderBindingStride, static_cast( callableShaderBindingTableBuffer ), callableShaderBindingOffset, callableShaderBindingStride, width, height, depth ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::traceRaysNV( Buffer raygenShaderBindingTableBuffer, DeviceSize raygenShaderBindingOffset, Buffer missShaderBindingTableBuffer, DeviceSize missShaderBindingOffset, DeviceSize missShaderBindingStride, Buffer hitShaderBindingTableBuffer, DeviceSize hitShaderBindingOffset, DeviceSize hitShaderBindingStride, Buffer callableShaderBindingTableBuffer, DeviceSize callableShaderBindingOffset, DeviceSize 
callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth, Dispatch const &d ) const
+  {
+    d.vkCmdTraceRaysNV( m_commandBuffer, static_cast<VkBuffer>( raygenShaderBindingTableBuffer ), raygenShaderBindingOffset, static_cast<VkBuffer>( missShaderBindingTableBuffer ), missShaderBindingOffset, missShaderBindingStride, static_cast<VkBuffer>( hitShaderBindingTableBuffer ), hitShaderBindingOffset, hitShaderBindingStride, static_cast<VkBuffer>( callableShaderBindingTableBuffer ), callableShaderBindingOffset, callableShaderBindingStride, width, height, depth );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  struct SubmitInfo
+  {
+    SubmitInfo( uint32_t waitSemaphoreCount_ = 0,
+                const Semaphore* pWaitSemaphores_ = nullptr,
+                const PipelineStageFlags* pWaitDstStageMask_ = nullptr,
+                uint32_t commandBufferCount_ = 0,
+                const CommandBuffer* pCommandBuffers_ = nullptr,
+                uint32_t signalSemaphoreCount_ = 0,
+                const Semaphore* pSignalSemaphores_ = nullptr )
+      : waitSemaphoreCount( waitSemaphoreCount_ )
+      , pWaitSemaphores( pWaitSemaphores_ )
+      , pWaitDstStageMask( pWaitDstStageMask_ )
+      , commandBufferCount( commandBufferCount_ )
+      , pCommandBuffers( pCommandBuffers_ )
+      , signalSemaphoreCount( signalSemaphoreCount_ )
+      , pSignalSemaphores( pSignalSemaphores_ )
+    {
+    }
+
+    SubmitInfo( VkSubmitInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( SubmitInfo ) );
+    }
+
+    SubmitInfo& operator=( VkSubmitInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( SubmitInfo ) );
+      return *this;
+    }
+    SubmitInfo& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    SubmitInfo& setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ )
+    {
+      waitSemaphoreCount = waitSemaphoreCount_;
+      return *this;
+    }
+
+    SubmitInfo& setPWaitSemaphores( const Semaphore* pWaitSemaphores_ )
+    {
+      pWaitSemaphores = pWaitSemaphores_;
+      return *this;
+    }
+
+    SubmitInfo& setPWaitDstStageMask( const PipelineStageFlags* pWaitDstStageMask_ )
+    {
+      pWaitDstStageMask = pWaitDstStageMask_;
+      return *this;
+    }
+
+    SubmitInfo& setCommandBufferCount( uint32_t commandBufferCount_ )
+    {
+      commandBufferCount = commandBufferCount_;
+      return *this;
+    }
+
+    SubmitInfo& setPCommandBuffers( const CommandBuffer* pCommandBuffers_ )
+    {
+      pCommandBuffers = pCommandBuffers_;
+      return *this;
+    }
+
+    SubmitInfo& setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ )
+    {
+      signalSemaphoreCount = signalSemaphoreCount_;
+      return *this;
+    }
+
+    SubmitInfo& setPSignalSemaphores( const Semaphore* pSignalSemaphores_ )
+    {
+      pSignalSemaphores = pSignalSemaphores_;
+      return *this;
+    }
+
+    operator VkSubmitInfo const&() const
+    {
+      return *reinterpret_cast<const VkSubmitInfo*>(this);
+    }
+
+    operator VkSubmitInfo &()
+    {
+      return *reinterpret_cast<VkSubmitInfo*>(this);
+    }
+
+    bool operator==( SubmitInfo const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( waitSemaphoreCount == rhs.waitSemaphoreCount )
+          && ( pWaitSemaphores == rhs.pWaitSemaphores )
+          && ( pWaitDstStageMask == rhs.pWaitDstStageMask )
+          && ( commandBufferCount == rhs.commandBufferCount )
+          && ( pCommandBuffers == rhs.pCommandBuffers )
+          && ( signalSemaphoreCount == rhs.signalSemaphoreCount )
+          && ( pSignalSemaphores == rhs.pSignalSemaphores );
+    }
+
+    bool operator!=( SubmitInfo const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType = StructureType::eSubmitInfo;
+
+  public:
+    const void* pNext = nullptr;
+    uint32_t waitSemaphoreCount;
+    const Semaphore* pWaitSemaphores;
+    const PipelineStageFlags* pWaitDstStageMask;
+    uint32_t commandBufferCount;
+    const
CommandBuffer* pCommandBuffers; + uint32_t signalSemaphoreCount; + const Semaphore* pSignalSemaphores; + }; + static_assert( sizeof( SubmitInfo ) == sizeof( VkSubmitInfo ), "struct and wrapper have different size!" ); + + class Queue + { + public: + VULKAN_HPP_CONSTEXPR Queue() + : m_queue(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR Queue( std::nullptr_t ) + : m_queue(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT Queue( VkQueue queue ) + : m_queue( queue ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + Queue & operator=(VkQueue queue) + { + m_queue = queue; + return *this; + } +#endif + + Queue & operator=( std::nullptr_t ) + { + m_queue = VK_NULL_HANDLE; + return *this; + } + + bool operator==( Queue const & rhs ) const + { + return m_queue == rhs.m_queue; + } + + bool operator!=(Queue const & rhs ) const + { + return m_queue != rhs.m_queue; + } + + bool operator<(Queue const & rhs ) const + { + return m_queue < rhs.m_queue; + } + + template + Result submit( uint32_t submitCount, const SubmitInfo* pSubmits, Fence fence, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type submit( ArrayProxy submits, Fence fence, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + Result waitIdle(Dispatch const &d = Dispatch() ) const; +#else + template + ResultValueType::type waitIdle(Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result bindSparse( uint32_t bindInfoCount, const BindSparseInfo* pBindInfo, Fence fence, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type bindSparse( ArrayProxy bindInfo, Fence fence, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result presentKHR( const PresentInfoKHR* pPresentInfo, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + Result presentKHR( const PresentInfoKHR & presentInfo, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void endDebugUtilsLabelEXT(Dispatch const &d = Dispatch() ) const; + + template + void insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getCheckpointDataNV( uint32_t* pCheckpointDataCount, CheckpointDataNV* pCheckpointData, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = DispatchLoaderStatic> + std::vector getCheckpointDataNV(Dispatch const &d = Dispatch() ) const; + template , typename Dispatch = DispatchLoaderStatic> + std::vector getCheckpointDataNV(Allocator const& vectorAllocator, Dispatch const &d ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkQueue() const + { + return m_queue; + } + + 
explicit operator bool() const + { + return m_queue != VK_NULL_HANDLE; + } + + bool operator!() const + { + return m_queue == VK_NULL_HANDLE; + } + + private: + VkQueue m_queue; + }; + + static_assert( sizeof( Queue ) == sizeof( VkQueue ), "handle and wrapper have different size!" ); + + template + VULKAN_HPP_INLINE Result Queue::submit( uint32_t submitCount, const SubmitInfo* pSubmits, Fence fence, Dispatch const &d) const + { + return static_cast( d.vkQueueSubmit( m_queue, submitCount, reinterpret_cast( pSubmits ), static_cast( fence ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Queue::submit( ArrayProxy submits, Fence fence, Dispatch const &d ) const + { + Result result = static_cast( d.vkQueueSubmit( m_queue, submits.size() , reinterpret_cast( submits.data() ), static_cast( fence ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Queue::submit" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE Result Queue::waitIdle(Dispatch const &d) const + { + return static_cast( d.vkQueueWaitIdle( m_queue ) ); + } +#else + template + VULKAN_HPP_INLINE ResultValueType::type Queue::waitIdle(Dispatch const &d ) const + { + Result result = static_cast( d.vkQueueWaitIdle( m_queue ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Queue::waitIdle" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Queue::bindSparse( uint32_t bindInfoCount, const BindSparseInfo* pBindInfo, Fence fence, Dispatch const &d) const + { + return static_cast( d.vkQueueBindSparse( m_queue, bindInfoCount, reinterpret_cast( pBindInfo ), static_cast( fence ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Queue::bindSparse( ArrayProxy bindInfo, Fence fence, Dispatch const &d ) const + { + Result result = static_cast( d.vkQueueBindSparse( m_queue, bindInfo.size() , reinterpret_cast( bindInfo.data() ), static_cast( fence ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Queue::bindSparse" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Queue::presentKHR( const PresentInfoKHR* pPresentInfo, Dispatch const &d) const + { + return static_cast( d.vkQueuePresentKHR( m_queue, reinterpret_cast( pPresentInfo ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE Result Queue::presentKHR( const PresentInfoKHR & presentInfo, Dispatch const &d ) const + { + Result result = static_cast( d.vkQueuePresentKHR( m_queue, reinterpret_cast( &presentInfo ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Queue::presentKHR", { Result::eSuccess, Result::eSuboptimalKHR } ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Queue::beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d) const + { + d.vkQueueBeginDebugUtilsLabelEXT( m_queue, reinterpret_cast( pLabelInfo ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Queue::beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d ) const + { + d.vkQueueBeginDebugUtilsLabelEXT( m_queue, reinterpret_cast( &labelInfo ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Queue::endDebugUtilsLabelEXT(Dispatch const &d) const + { 
+    d.vkQueueEndDebugUtilsLabelEXT( m_queue );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Queue::endDebugUtilsLabelEXT(Dispatch const &d ) const
+  {
+    d.vkQueueEndDebugUtilsLabelEXT( m_queue );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Queue::insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d) const
+  {
+    d.vkQueueInsertDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT*>( pLabelInfo ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Queue::insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d ) const
+  {
+    d.vkQueueInsertDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT*>( &labelInfo ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Queue::getCheckpointDataNV( uint32_t* pCheckpointDataCount, CheckpointDataNV* pCheckpointData, Dispatch const &d) const
+  {
+    d.vkGetQueueCheckpointDataNV( m_queue, pCheckpointDataCount, reinterpret_cast<VkCheckpointDataNV*>( pCheckpointData ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE std::vector<CheckpointDataNV,Allocator> Queue::getCheckpointDataNV(Dispatch const &d ) const
+  {
+    std::vector<CheckpointDataNV,Allocator> checkpointData;
+    uint32_t checkpointDataCount;
+    d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, nullptr );
+    checkpointData.resize( checkpointDataCount );
+    d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, reinterpret_cast<VkCheckpointDataNV*>( checkpointData.data() ) );
+    return checkpointData;
+  }
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE std::vector<CheckpointDataNV,Allocator> Queue::getCheckpointDataNV(Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<CheckpointDataNV,Allocator> checkpointData( vectorAllocator );
+    uint32_t checkpointDataCount;
+    d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, nullptr );
+    checkpointData.resize( checkpointDataCount );
+    d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, reinterpret_cast<VkCheckpointDataNV*>( checkpointData.data() ) );
+    return checkpointData;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  class Device;
+
+  template <typename Dispatch> class UniqueHandleTraits<AccelerationStructureNV,Dispatch> {public: using deleter = ObjectDestroy<Device,Dispatch>; };
+  using UniqueAccelerationStructureNV = UniqueHandle<AccelerationStructureNV,DispatchLoaderStatic>;
+  template <typename Dispatch> class UniqueHandleTraits<Buffer,Dispatch> {public: using deleter = ObjectDestroy<Device,Dispatch>; };
+  using UniqueBuffer = UniqueHandle<Buffer,DispatchLoaderStatic>;
+  template <typename Dispatch> class UniqueHandleTraits<BufferView,Dispatch> {public: using deleter = ObjectDestroy<Device,Dispatch>; };
+  using UniqueBufferView = UniqueHandle<BufferView,DispatchLoaderStatic>;
+  template <typename Dispatch> class UniqueHandleTraits<CommandBuffer,Dispatch> {public: using deleter = PoolFree<Device, CommandPool,Dispatch>; };
+  using UniqueCommandBuffer = UniqueHandle<CommandBuffer,DispatchLoaderStatic>;
+  template <typename Dispatch> class UniqueHandleTraits<CommandPool,Dispatch> {public: using deleter = ObjectDestroy<Device,Dispatch>; };
+  using UniqueCommandPool = UniqueHandle<CommandPool,DispatchLoaderStatic>;
+  template <typename Dispatch> class UniqueHandleTraits<DescriptorPool,Dispatch> {public: using deleter = ObjectDestroy<Device,Dispatch>; };
+  using UniqueDescriptorPool = UniqueHandle<DescriptorPool,DispatchLoaderStatic>;
+  template <typename Dispatch> class UniqueHandleTraits<DescriptorSet,Dispatch> {public: using deleter = PoolFree<Device, DescriptorPool,Dispatch>; };
+  using UniqueDescriptorSet = UniqueHandle<DescriptorSet,DispatchLoaderStatic>;
+  template <typename Dispatch> class UniqueHandleTraits<DescriptorSetLayout,Dispatch> {public: using deleter = ObjectDestroy<Device,Dispatch>; };
+  using UniqueDescriptorSetLayout = UniqueHandle<DescriptorSetLayout,DispatchLoaderStatic>;
+  template <typename Dispatch> class UniqueHandleTraits<DescriptorUpdateTemplate,Dispatch> {public: using deleter = ObjectDestroy<Device,Dispatch>; };
+  using UniqueDescriptorUpdateTemplate = UniqueHandle<DescriptorUpdateTemplate,DispatchLoaderStatic>;
+  template <typename Dispatch> class UniqueHandleTraits<DeviceMemory,Dispatch> {public: using deleter = ObjectFree<Device,Dispatch>; };
+  using UniqueDeviceMemory = UniqueHandle<DeviceMemory,DispatchLoaderStatic>;
+  template <typename Dispatch> class UniqueHandleTraits<Event,Dispatch> {public: using deleter = ObjectDestroy<Device,Dispatch>; };
+  using UniqueEvent = UniqueHandle<Event,DispatchLoaderStatic>;
+  template <typename Dispatch> class UniqueHandleTraits<Fence,Dispatch> {public: using deleter = ObjectDestroy<Device,Dispatch>; };
+  using UniqueFence = UniqueHandle<Fence,DispatchLoaderStatic>;
+  template <typename Dispatch>
class UniqueHandleTraits {public: using deleter = ObjectDestroy; }; + using UniqueFramebuffer = UniqueHandle; + template class UniqueHandleTraits {public: using deleter = ObjectDestroy; }; + using UniqueImage = UniqueHandle; + template class UniqueHandleTraits {public: using deleter = ObjectDestroy; }; + using UniqueImageView = UniqueHandle; + template class UniqueHandleTraits {public: using deleter = ObjectDestroy; }; + using UniqueIndirectCommandsLayoutNVX = UniqueHandle; + template class UniqueHandleTraits {public: using deleter = ObjectDestroy; }; + using UniqueObjectTableNVX = UniqueHandle; + template class UniqueHandleTraits {public: using deleter = ObjectDestroy; }; + using UniquePipeline = UniqueHandle; + template class UniqueHandleTraits {public: using deleter = ObjectDestroy; }; + using UniquePipelineCache = UniqueHandle; + template class UniqueHandleTraits {public: using deleter = ObjectDestroy; }; + using UniquePipelineLayout = UniqueHandle; + template class UniqueHandleTraits {public: using deleter = ObjectDestroy; }; + using UniqueQueryPool = UniqueHandle; + template class UniqueHandleTraits {public: using deleter = ObjectDestroy; }; + using UniqueRenderPass = UniqueHandle; + template class UniqueHandleTraits {public: using deleter = ObjectDestroy; }; + using UniqueSampler = UniqueHandle; + template class UniqueHandleTraits {public: using deleter = ObjectDestroy; }; + using UniqueSamplerYcbcrConversion = UniqueHandle; + template class UniqueHandleTraits {public: using deleter = ObjectDestroy; }; + using UniqueSemaphore = UniqueHandle; + template class UniqueHandleTraits {public: using deleter = ObjectDestroy; }; + using UniqueShaderModule = UniqueHandle; + template class UniqueHandleTraits {public: using deleter = ObjectDestroy; }; + using UniqueSwapchainKHR = UniqueHandle; + template class UniqueHandleTraits {public: using deleter = ObjectDestroy; }; + using UniqueValidationCacheEXT = UniqueHandle; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ + + class Device + { + public: + VULKAN_HPP_CONSTEXPR Device() + : m_device(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR Device( std::nullptr_t ) + : m_device(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT Device( VkDevice device ) + : m_device( device ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + Device & operator=(VkDevice device) + { + m_device = device; + return *this; + } +#endif + + Device & operator=( std::nullptr_t ) + { + m_device = VK_NULL_HANDLE; + return *this; + } + + bool operator==( Device const & rhs ) const + { + return m_device == rhs.m_device; + } + + bool operator!=(Device const & rhs ) const + { + return m_device != rhs.m_device; + } + + bool operator<(Device const & rhs ) const + { + return m_device < rhs.m_device; + } + + template + PFN_vkVoidFunction getProcAddr( const char* pName, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + PFN_vkVoidFunction getProcAddr( const std::string & name, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, Queue* pQueue, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + Queue getQueue( 
uint32_t queueFamilyIndex, uint32_t queueIndex, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + Result waitIdle(Dispatch const &d = Dispatch() ) const; +#else + template + ResultValueType::type waitIdle(Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result allocateMemory( const MemoryAllocateInfo* pAllocateInfo, const AllocationCallbacks* pAllocator, DeviceMemory* pMemory, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type allocateMemory( const MemoryAllocateInfo & allocateInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + typename ResultValueType>::type allocateMemoryUnique( const MemoryAllocateInfo & allocateInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void freeMemory( DeviceMemory memory, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void freeMemory( DeviceMemory memory, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void free( DeviceMemory memory, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void free( DeviceMemory memory, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result mapMemory( DeviceMemory memory, DeviceSize offset, DeviceSize size, MemoryMapFlags flags, void** ppData, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type mapMemory( DeviceMemory memory, DeviceSize offset, DeviceSize size, MemoryMapFlags flags = MemoryMapFlags(), Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void unmapMemory( DeviceMemory memory, Dispatch const &d = Dispatch() ) const; + + template + Result flushMappedMemoryRanges( uint32_t memoryRangeCount, const MappedMemoryRange* pMemoryRanges, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type flushMappedMemoryRanges( ArrayProxy memoryRanges, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result invalidateMappedMemoryRanges( uint32_t memoryRangeCount, const MappedMemoryRange* pMemoryRanges, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type invalidateMappedMemoryRanges( ArrayProxy memoryRanges, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getMemoryCommitment( DeviceMemory memory, DeviceSize* pCommittedMemoryInBytes, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + DeviceSize getMemoryCommitment( DeviceMemory memory, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getBufferMemoryRequirements( Buffer buffer, MemoryRequirements* pMemoryRequirements, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + 
MemoryRequirements getBufferMemoryRequirements( Buffer buffer, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + Result bindBufferMemory( Buffer buffer, DeviceMemory memory, DeviceSize memoryOffset, Dispatch const &d = Dispatch() ) const; +#else + template + ResultValueType::type bindBufferMemory( Buffer buffer, DeviceMemory memory, DeviceSize memoryOffset, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getImageMemoryRequirements( Image image, MemoryRequirements* pMemoryRequirements, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + MemoryRequirements getImageMemoryRequirements( Image image, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + Result bindImageMemory( Image image, DeviceMemory memory, DeviceSize memoryOffset, Dispatch const &d = Dispatch() ) const; +#else + template + ResultValueType::type bindImageMemory( Image image, DeviceMemory memory, DeviceSize memoryOffset, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getImageSparseMemoryRequirements( Image image, uint32_t* pSparseMemoryRequirementCount, SparseImageMemoryRequirements* pSparseMemoryRequirements, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = DispatchLoaderStatic> + std::vector getImageSparseMemoryRequirements( Image image, Dispatch const &d = Dispatch() ) const; + template , typename Dispatch = DispatchLoaderStatic> + std::vector getImageSparseMemoryRequirements( Image image, Allocator const& vectorAllocator, Dispatch const &d ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result createFence( const FenceCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Fence* pFence, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type createFence( const FenceCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + typename ResultValueType>::type createFenceUnique( const FenceCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyFence( Fence fence, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyFence( Fence fence, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( Fence fence, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( Fence fence, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result resetFences( uint32_t fenceCount, const Fence* pFences, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type resetFences( ArrayProxy fences, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result getFenceStatus( Fence fence, Dispatch 
const &d = Dispatch() ) const; + + template + Result waitForFences( uint32_t fenceCount, const Fence* pFences, Bool32 waitAll, uint64_t timeout, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + Result waitForFences( ArrayProxy fences, Bool32 waitAll, uint64_t timeout, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result createSemaphore( const SemaphoreCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Semaphore* pSemaphore, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type createSemaphore( const SemaphoreCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + typename ResultValueType>::type createSemaphoreUnique( const SemaphoreCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroySemaphore( Semaphore semaphore, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroySemaphore( Semaphore semaphore, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( Semaphore semaphore, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( Semaphore semaphore, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result createEvent( const EventCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Event* pEvent, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type createEvent( const EventCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + typename ResultValueType>::type createEventUnique( const EventCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyEvent( Event event, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyEvent( Event event, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( Event event, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( Event event, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result getEventStatus( Event event, Dispatch const &d = Dispatch() ) const; + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + Result setEvent( Event event, Dispatch const &d = Dispatch() ) const; +#else + template + ResultValueType::type setEvent( Event event, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + Result resetEvent( Event event, Dispatch const &d = Dispatch() ) 
const; +#else + template + ResultValueType::type resetEvent( Event event, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result createQueryPool( const QueryPoolCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, QueryPool* pQueryPool, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type createQueryPool( const QueryPoolCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + typename ResultValueType>::type createQueryPoolUnique( const QueryPoolCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyQueryPool( QueryPool queryPool, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyQueryPool( QueryPool queryPool, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( QueryPool queryPool, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( QueryPool queryPool, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result getQueryPoolResults( QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void* pData, DeviceSize stride, QueryResultFlags flags, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + Result getQueryPoolResults( QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, ArrayProxy data, DeviceSize stride, QueryResultFlags flags, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result createBuffer( const BufferCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Buffer* pBuffer, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type createBuffer( const BufferCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + typename ResultValueType>::type createBufferUnique( const BufferCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyBuffer( Buffer buffer, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyBuffer( Buffer buffer, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( Buffer buffer, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( Buffer buffer, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result createBufferView( const BufferViewCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, BufferView* pView, Dispatch const &d = Dispatch() ) const; +#ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type createBufferView( const BufferViewCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + typename ResultValueType>::type createBufferViewUnique( const BufferViewCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyBufferView( BufferView bufferView, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyBufferView( BufferView bufferView, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( BufferView bufferView, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( BufferView bufferView, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result createImage( const ImageCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Image* pImage, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type createImage( const ImageCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + typename ResultValueType>::type createImageUnique( const ImageCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyImage( Image image, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyImage( Image image, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( Image image, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( Image image, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getImageSubresourceLayout( Image image, const ImageSubresource* pSubresource, SubresourceLayout* pLayout, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + SubresourceLayout getImageSubresourceLayout( Image image, const ImageSubresource & subresource, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result createImageView( const ImageViewCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, ImageView* pView, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type createImageView( const ImageViewCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + typename ResultValueType>::type createImageViewUnique( const ImageViewCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif 
/*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyImageView( ImageView imageView, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyImageView( ImageView imageView, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( ImageView imageView, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( ImageView imageView, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result createShaderModule( const ShaderModuleCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, ShaderModule* pShaderModule, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type createShaderModule( const ShaderModuleCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + typename ResultValueType>::type createShaderModuleUnique( const ShaderModuleCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyShaderModule( ShaderModule shaderModule, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyShaderModule( ShaderModule shaderModule, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( ShaderModule shaderModule, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( ShaderModule shaderModule, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result createPipelineCache( const PipelineCacheCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, PipelineCache* pPipelineCache, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type createPipelineCache( const PipelineCacheCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + typename ResultValueType>::type createPipelineCacheUnique( const PipelineCacheCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyPipelineCache( PipelineCache pipelineCache, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyPipelineCache( PipelineCache pipelineCache, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( PipelineCache pipelineCache, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( PipelineCache pipelineCache, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif 
/*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result getPipelineCacheData( PipelineCache pipelineCache, size_t* pDataSize, void* pData, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = DispatchLoaderStatic> + typename ResultValueType>::type getPipelineCacheData( PipelineCache pipelineCache, Dispatch const &d = Dispatch() ) const; + template , typename Dispatch = DispatchLoaderStatic> + typename ResultValueType>::type getPipelineCacheData( PipelineCache pipelineCache, Allocator const& vectorAllocator, Dispatch const &d ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result mergePipelineCaches( PipelineCache dstCache, uint32_t srcCacheCount, const PipelineCache* pSrcCaches, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type mergePipelineCaches( PipelineCache dstCache, ArrayProxy srcCaches, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result createGraphicsPipelines( PipelineCache pipelineCache, uint32_t createInfoCount, const GraphicsPipelineCreateInfo* pCreateInfos, const AllocationCallbacks* pAllocator, Pipeline* pPipelines, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = DispatchLoaderStatic> + typename ResultValueType>::type createGraphicsPipelines( PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; + template , typename Dispatch = DispatchLoaderStatic> + typename ResultValueType>::type createGraphicsPipelines( PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const; + template + ResultValueType::type createGraphicsPipeline( PipelineCache pipelineCache, const GraphicsPipelineCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template , typename Dispatch = DispatchLoaderStatic> + typename ResultValueType,Allocator>>::type createGraphicsPipelinesUnique( PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; + template , typename Dispatch = DispatchLoaderStatic> + typename ResultValueType,Allocator>>::type createGraphicsPipelinesUnique( PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const; + template + typename ResultValueType>::type createGraphicsPipelineUnique( PipelineCache pipelineCache, const GraphicsPipelineCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result createComputePipelines( PipelineCache pipelineCache, uint32_t createInfoCount, const ComputePipelineCreateInfo* pCreateInfos, const AllocationCallbacks* pAllocator, Pipeline* pPipelines, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = DispatchLoaderStatic> + typename ResultValueType>::type createComputePipelines( PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; + template , typename Dispatch = DispatchLoaderStatic> + typename ResultValueType>::type createComputePipelines( PipelineCache pipelineCache, 
ArrayProxy createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const; + template + ResultValueType::type createComputePipeline( PipelineCache pipelineCache, const ComputePipelineCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template , typename Dispatch = DispatchLoaderStatic> + typename ResultValueType,Allocator>>::type createComputePipelinesUnique( PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; + template , typename Dispatch = DispatchLoaderStatic> + typename ResultValueType,Allocator>>::type createComputePipelinesUnique( PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const; + template + typename ResultValueType>::type createComputePipelineUnique( PipelineCache pipelineCache, const ComputePipelineCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyPipeline( Pipeline pipeline, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyPipeline( Pipeline pipeline, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( Pipeline pipeline, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( Pipeline pipeline, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result createPipelineLayout( const PipelineLayoutCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, PipelineLayout* pPipelineLayout, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type createPipelineLayout( const PipelineLayoutCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + typename ResultValueType>::type createPipelineLayoutUnique( const PipelineLayoutCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyPipelineLayout( PipelineLayout pipelineLayout, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyPipelineLayout( PipelineLayout pipelineLayout, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( PipelineLayout pipelineLayout, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( PipelineLayout pipelineLayout, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result createSampler( const SamplerCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Sampler* pSampler, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type 
createSampler( const SamplerCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + typename ResultValueType>::type createSamplerUnique( const SamplerCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroySampler( Sampler sampler, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroySampler( Sampler sampler, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( Sampler sampler, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( Sampler sampler, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result createDescriptorSetLayout( const DescriptorSetLayoutCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, DescriptorSetLayout* pSetLayout, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type createDescriptorSetLayout( const DescriptorSetLayoutCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + typename ResultValueType>::type createDescriptorSetLayoutUnique( const DescriptorSetLayoutCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyDescriptorSetLayout( DescriptorSetLayout descriptorSetLayout, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyDescriptorSetLayout( DescriptorSetLayout descriptorSetLayout, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( DescriptorSetLayout descriptorSetLayout, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( DescriptorSetLayout descriptorSetLayout, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result createDescriptorPool( const DescriptorPoolCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, DescriptorPool* pDescriptorPool, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type createDescriptorPool( const DescriptorPoolCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + typename ResultValueType>::type createDescriptorPoolUnique( const DescriptorPoolCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyDescriptorPool( DescriptorPool descriptorPool, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + 
void destroyDescriptorPool( DescriptorPool descriptorPool, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( DescriptorPool descriptorPool, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( DescriptorPool descriptorPool, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + Result resetDescriptorPool( DescriptorPool descriptorPool, DescriptorPoolResetFlags flags = DescriptorPoolResetFlags(), Dispatch const &d = Dispatch() ) const; +#else + template + ResultValueType::type resetDescriptorPool( DescriptorPool descriptorPool, DescriptorPoolResetFlags flags = DescriptorPoolResetFlags(), Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result allocateDescriptorSets( const DescriptorSetAllocateInfo* pAllocateInfo, DescriptorSet* pDescriptorSets, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = DispatchLoaderStatic> + typename ResultValueType>::type allocateDescriptorSets( const DescriptorSetAllocateInfo & allocateInfo, Dispatch const &d = Dispatch() ) const; + template , typename Dispatch = DispatchLoaderStatic> + typename ResultValueType>::type allocateDescriptorSets( const DescriptorSetAllocateInfo & allocateInfo, Allocator const& vectorAllocator, Dispatch const &d ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template , typename Dispatch = DispatchLoaderStatic> + typename ResultValueType,Allocator>>::type allocateDescriptorSetsUnique( const DescriptorSetAllocateInfo & allocateInfo, Dispatch const &d = Dispatch() ) const; + template , typename Dispatch = DispatchLoaderStatic> + typename ResultValueType,Allocator>>::type allocateDescriptorSetsUnique( const DescriptorSetAllocateInfo & allocateInfo, Allocator const& vectorAllocator, Dispatch const &d ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result freeDescriptorSets( DescriptorPool descriptorPool, uint32_t descriptorSetCount, const DescriptorSet* pDescriptorSets, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type freeDescriptorSets( DescriptorPool descriptorPool, ArrayProxy descriptorSets, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result free( DescriptorPool descriptorPool, uint32_t descriptorSetCount, const DescriptorSet* pDescriptorSets, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type free( DescriptorPool descriptorPool, ArrayProxy descriptorSets, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void updateDescriptorSets( uint32_t descriptorWriteCount, const WriteDescriptorSet* pDescriptorWrites, uint32_t descriptorCopyCount, const CopyDescriptorSet* pDescriptorCopies, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void updateDescriptorSets( ArrayProxy descriptorWrites, ArrayProxy descriptorCopies, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result createFramebuffer( const FramebufferCreateInfo* pCreateInfo, const 
AllocationCallbacks* pAllocator, Framebuffer* pFramebuffer, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type createFramebuffer( const FramebufferCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + typename ResultValueType>::type createFramebufferUnique( const FramebufferCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyFramebuffer( Framebuffer framebuffer, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyFramebuffer( Framebuffer framebuffer, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( Framebuffer framebuffer, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( Framebuffer framebuffer, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result createRenderPass( const RenderPassCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, RenderPass* pRenderPass, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type createRenderPass( const RenderPassCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + typename ResultValueType>::type createRenderPassUnique( const RenderPassCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyRenderPass( RenderPass renderPass, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyRenderPass( RenderPass renderPass, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( RenderPass renderPass, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( RenderPass renderPass, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getRenderAreaGranularity( RenderPass renderPass, Extent2D* pGranularity, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + Extent2D getRenderAreaGranularity( RenderPass renderPass, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result createCommandPool( const CommandPoolCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, CommandPool* pCommandPool, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type createCommandPool( const CommandPoolCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + typename ResultValueType>::type createCommandPoolUnique( const 
CommandPoolCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyCommandPool( CommandPool commandPool, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyCommandPool( CommandPool commandPool, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( CommandPool commandPool, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( CommandPool commandPool, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + Result resetCommandPool( CommandPool commandPool, CommandPoolResetFlags flags, Dispatch const &d = Dispatch() ) const; +#else + template + ResultValueType::type resetCommandPool( CommandPool commandPool, CommandPoolResetFlags flags, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result allocateCommandBuffers( const CommandBufferAllocateInfo* pAllocateInfo, CommandBuffer* pCommandBuffers, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = DispatchLoaderStatic> + typename ResultValueType>::type allocateCommandBuffers( const CommandBufferAllocateInfo & allocateInfo, Dispatch const &d = Dispatch() ) const; + template , typename Dispatch = DispatchLoaderStatic> + typename ResultValueType>::type allocateCommandBuffers( const CommandBufferAllocateInfo & allocateInfo, Allocator const& vectorAllocator, Dispatch const &d ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template , typename Dispatch = DispatchLoaderStatic> + typename ResultValueType,Allocator>>::type allocateCommandBuffersUnique( const CommandBufferAllocateInfo & allocateInfo, Dispatch const &d = Dispatch() ) const; + template , typename Dispatch = DispatchLoaderStatic> + typename ResultValueType,Allocator>>::type allocateCommandBuffersUnique( const CommandBufferAllocateInfo & allocateInfo, Allocator const& vectorAllocator, Dispatch const &d ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void freeCommandBuffers( CommandPool commandPool, uint32_t commandBufferCount, const CommandBuffer* pCommandBuffers, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void freeCommandBuffers( CommandPool commandPool, ArrayProxy commandBuffers, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void free( CommandPool commandPool, uint32_t commandBufferCount, const CommandBuffer* pCommandBuffers, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void free( CommandPool commandPool, ArrayProxy commandBuffers, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result createSharedSwapchainsKHR( uint32_t swapchainCount, const SwapchainCreateInfoKHR* pCreateInfos, const AllocationCallbacks* pAllocator, SwapchainKHR* pSwapchains, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = DispatchLoaderStatic> + 
typename ResultValueType>::type createSharedSwapchainsKHR( ArrayProxy createInfos, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; + template , typename Dispatch = DispatchLoaderStatic> + typename ResultValueType>::type createSharedSwapchainsKHR( ArrayProxy createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const; + template + ResultValueType::type createSharedSwapchainKHR( const SwapchainCreateInfoKHR & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template , typename Dispatch = DispatchLoaderStatic> + typename ResultValueType,Allocator>>::type createSharedSwapchainsKHRUnique( ArrayProxy createInfos, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; + template , typename Dispatch = DispatchLoaderStatic> + typename ResultValueType,Allocator>>::type createSharedSwapchainsKHRUnique( ArrayProxy createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const; + template + typename ResultValueType>::type createSharedSwapchainKHRUnique( const SwapchainCreateInfoKHR & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result createSwapchainKHR( const SwapchainCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SwapchainKHR* pSwapchain, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type createSwapchainKHR( const SwapchainCreateInfoKHR & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + typename ResultValueType>::type createSwapchainKHRUnique( const SwapchainCreateInfoKHR & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroySwapchainKHR( SwapchainKHR swapchain, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroySwapchainKHR( SwapchainKHR swapchain, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( SwapchainKHR swapchain, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( SwapchainKHR swapchain, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result getSwapchainImagesKHR( SwapchainKHR swapchain, uint32_t* pSwapchainImageCount, Image* pSwapchainImages, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = DispatchLoaderStatic> + typename ResultValueType>::type getSwapchainImagesKHR( SwapchainKHR swapchain, Dispatch const &d = Dispatch() ) const; + template , typename Dispatch = DispatchLoaderStatic> + typename ResultValueType>::type getSwapchainImagesKHR( SwapchainKHR swapchain, Allocator const& vectorAllocator, Dispatch const &d ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result acquireNextImageKHR( SwapchainKHR swapchain, uint64_t timeout, Semaphore semaphore, Fence fence, uint32_t* pImageIndex, Dispatch const &d = Dispatch() ) const; +#ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValue acquireNextImageKHR( SwapchainKHR swapchain, uint64_t timeout, Semaphore semaphore, Fence fence, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result debugMarkerSetObjectNameEXT( const DebugMarkerObjectNameInfoEXT* pNameInfo, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type debugMarkerSetObjectNameEXT( const DebugMarkerObjectNameInfoEXT & nameInfo, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result debugMarkerSetObjectTagEXT( const DebugMarkerObjectTagInfoEXT* pTagInfo, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type debugMarkerSetObjectTagEXT( const DebugMarkerObjectTagInfoEXT & tagInfo, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VK_USE_PLATFORM_WIN32_NV + template + Result getMemoryWin32HandleNV( DeviceMemory memory, ExternalMemoryHandleTypeFlagsNV handleType, HANDLE* pHandle, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type getMemoryWin32HandleNV( DeviceMemory memory, ExternalMemoryHandleTypeFlagsNV handleType, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_NV*/ + + template + Result createIndirectCommandsLayoutNVX( const IndirectCommandsLayoutCreateInfoNVX* pCreateInfo, const AllocationCallbacks* pAllocator, IndirectCommandsLayoutNVX* pIndirectCommandsLayout, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type createIndirectCommandsLayoutNVX( const IndirectCommandsLayoutCreateInfoNVX & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + typename ResultValueType>::type createIndirectCommandsLayoutNVXUnique( const IndirectCommandsLayoutCreateInfoNVX & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyIndirectCommandsLayoutNVX( IndirectCommandsLayoutNVX indirectCommandsLayout, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyIndirectCommandsLayoutNVX( IndirectCommandsLayoutNVX indirectCommandsLayout, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( IndirectCommandsLayoutNVX indirectCommandsLayout, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( IndirectCommandsLayoutNVX indirectCommandsLayout, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result createObjectTableNVX( const ObjectTableCreateInfoNVX* pCreateInfo, const AllocationCallbacks* pAllocator, ObjectTableNVX* pObjectTable, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type createObjectTableNVX( const ObjectTableCreateInfoNVX & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#ifndef 
VULKAN_HPP_NO_SMART_HANDLE + template + typename ResultValueType>::type createObjectTableNVXUnique( const ObjectTableCreateInfoNVX & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyObjectTableNVX( ObjectTableNVX objectTable, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyObjectTableNVX( ObjectTableNVX objectTable, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( ObjectTableNVX objectTable, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( ObjectTableNVX objectTable, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result registerObjectsNVX( ObjectTableNVX objectTable, uint32_t objectCount, const ObjectTableEntryNVX* const* ppObjectTableEntries, const uint32_t* pObjectIndices, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type registerObjectsNVX( ObjectTableNVX objectTable, ArrayProxy pObjectTableEntries, ArrayProxy objectIndices, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result unregisterObjectsNVX( ObjectTableNVX objectTable, uint32_t objectCount, const ObjectEntryTypeNVX* pObjectEntryTypes, const uint32_t* pObjectIndices, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type unregisterObjectsNVX( ObjectTableNVX objectTable, ArrayProxy objectEntryTypes, ArrayProxy objectIndices, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void trimCommandPool( CommandPool commandPool, CommandPoolTrimFlags flags = CommandPoolTrimFlags(), Dispatch const &d = Dispatch() ) const; + + template + void trimCommandPoolKHR( CommandPool commandPool, CommandPoolTrimFlags flags = CommandPoolTrimFlags(), Dispatch const &d = Dispatch() ) const; + +#ifdef VK_USE_PLATFORM_WIN32_KHR + template + Result getMemoryWin32HandleKHR( const MemoryGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type getMemoryWin32HandleKHR( const MemoryGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#ifdef VK_USE_PLATFORM_WIN32_KHR + template + Result getMemoryWin32HandlePropertiesKHR( ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, MemoryWin32HandlePropertiesKHR* pMemoryWin32HandleProperties, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type getMemoryWin32HandlePropertiesKHR( ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + template + Result getMemoryFdKHR( const MemoryGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type getMemoryFdKHR( const 
MemoryGetFdInfoKHR & getFdInfo, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result getMemoryFdPropertiesKHR( ExternalMemoryHandleTypeFlagBits handleType, int fd, MemoryFdPropertiesKHR* pMemoryFdProperties, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type getMemoryFdPropertiesKHR( ExternalMemoryHandleTypeFlagBits handleType, int fd, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VK_USE_PLATFORM_WIN32_KHR + template + Result getSemaphoreWin32HandleKHR( const SemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type getSemaphoreWin32HandleKHR( const SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#ifdef VK_USE_PLATFORM_WIN32_KHR + template + Result importSemaphoreWin32HandleKHR( const ImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type importSemaphoreWin32HandleKHR( const ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + template + Result getSemaphoreFdKHR( const SemaphoreGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type getSemaphoreFdKHR( const SemaphoreGetFdInfoKHR & getFdInfo, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result importSemaphoreFdKHR( const ImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type importSemaphoreFdKHR( const ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VK_USE_PLATFORM_WIN32_KHR + template + Result getFenceWin32HandleKHR( const FenceGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type getFenceWin32HandleKHR( const FenceGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#ifdef VK_USE_PLATFORM_WIN32_KHR + template + Result importFenceWin32HandleKHR( const ImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type importFenceWin32HandleKHR( const ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + template + Result getFenceFdKHR( const FenceGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type getFenceFdKHR( const FenceGetFdInfoKHR & getFdInfo, Dispatch const &d = Dispatch() ) const; +#endif 
/*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result importFenceFdKHR( const ImportFenceFdInfoKHR* pImportFenceFdInfo, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type importFenceFdKHR( const ImportFenceFdInfoKHR & importFenceFdInfo, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result displayPowerControlEXT( DisplayKHR display, const DisplayPowerInfoEXT* pDisplayPowerInfo, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type displayPowerControlEXT( DisplayKHR display, const DisplayPowerInfoEXT & displayPowerInfo, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result registerEventEXT( const DeviceEventInfoEXT* pDeviceEventInfo, const AllocationCallbacks* pAllocator, Fence* pFence, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type registerEventEXT( const DeviceEventInfoEXT & deviceEventInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result registerDisplayEventEXT( DisplayKHR display, const DisplayEventInfoEXT* pDisplayEventInfo, const AllocationCallbacks* pAllocator, Fence* pFence, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type registerDisplayEventEXT( DisplayKHR display, const DisplayEventInfoEXT & displayEventInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result getSwapchainCounterEXT( SwapchainKHR swapchain, SurfaceCounterFlagBitsEXT counter, uint64_t* pCounterValue, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type getSwapchainCounterEXT( SwapchainKHR swapchain, SurfaceCounterFlagBitsEXT counter, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getGroupPeerMemoryFeatures( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, PeerMemoryFeatureFlags* pPeerMemoryFeatures, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + PeerMemoryFeatureFlags getGroupPeerMemoryFeatures( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, PeerMemoryFeatureFlags* pPeerMemoryFeatures, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + PeerMemoryFeatureFlags getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result bindBufferMemory2( uint32_t bindInfoCount, const BindBufferMemoryInfo* pBindInfos, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type bindBufferMemory2( ArrayProxy bindInfos, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result bindBufferMemory2KHR( uint32_t bindInfoCount, const BindBufferMemoryInfo* pBindInfos, Dispatch 
const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type bindBufferMemory2KHR( ArrayProxy bindInfos, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result bindImageMemory2( uint32_t bindInfoCount, const BindImageMemoryInfo* pBindInfos, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type bindImageMemory2( ArrayProxy bindInfos, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result bindImageMemory2KHR( uint32_t bindInfoCount, const BindImageMemoryInfo* pBindInfos, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type bindImageMemory2KHR( ArrayProxy bindInfos, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result getGroupPresentCapabilitiesKHR( DeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type getGroupPresentCapabilitiesKHR(Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result getGroupSurfacePresentModesKHR( SurfaceKHR surface, DeviceGroupPresentModeFlagsKHR* pModes, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type getGroupSurfacePresentModesKHR( SurfaceKHR surface, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result acquireNextImage2KHR( const AcquireNextImageInfoKHR* pAcquireInfo, uint32_t* pImageIndex, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValue acquireNextImage2KHR( const AcquireNextImageInfoKHR & acquireInfo, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result createDescriptorUpdateTemplate( const DescriptorUpdateTemplateCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, DescriptorUpdateTemplate* pDescriptorUpdateTemplate, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type createDescriptorUpdateTemplate( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + typename ResultValueType>::type createDescriptorUpdateTemplateUnique( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result createDescriptorUpdateTemplateKHR( const DescriptorUpdateTemplateCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, DescriptorUpdateTemplate* pDescriptorUpdateTemplate, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type createDescriptorUpdateTemplateKHR( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + typename ResultValueType>::type createDescriptorUpdateTemplateKHRUnique( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) 
const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyDescriptorUpdateTemplate( DescriptorUpdateTemplate descriptorUpdateTemplate, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyDescriptorUpdateTemplate( DescriptorUpdateTemplate descriptorUpdateTemplate, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( DescriptorUpdateTemplate descriptorUpdateTemplate, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( DescriptorUpdateTemplate descriptorUpdateTemplate, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyDescriptorUpdateTemplateKHR( DescriptorUpdateTemplate descriptorUpdateTemplate, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyDescriptorUpdateTemplateKHR( DescriptorUpdateTemplate descriptorUpdateTemplate, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void updateDescriptorSetWithTemplate( DescriptorSet descriptorSet, DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const &d = Dispatch() ) const; + + template + void updateDescriptorSetWithTemplateKHR( DescriptorSet descriptorSet, DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const &d = Dispatch() ) const; + + template + void setHdrMetadataEXT( uint32_t swapchainCount, const SwapchainKHR* pSwapchains, const HdrMetadataEXT* pMetadata, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setHdrMetadataEXT( ArrayProxy swapchains, ArrayProxy metadata, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result getSwapchainStatusKHR( SwapchainKHR swapchain, Dispatch const &d = Dispatch() ) const; + + template + Result getRefreshCycleDurationGOOGLE( SwapchainKHR swapchain, RefreshCycleDurationGOOGLE* pDisplayTimingProperties, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type getRefreshCycleDurationGOOGLE( SwapchainKHR swapchain, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result getPastPresentationTimingGOOGLE( SwapchainKHR swapchain, uint32_t* pPresentationTimingCount, PastPresentationTimingGOOGLE* pPresentationTimings, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = DispatchLoaderStatic> + typename ResultValueType>::type getPastPresentationTimingGOOGLE( SwapchainKHR swapchain, Dispatch const &d = Dispatch() ) const; + template , typename Dispatch = DispatchLoaderStatic> + typename ResultValueType>::type getPastPresentationTimingGOOGLE( SwapchainKHR swapchain, Allocator const& vectorAllocator, Dispatch const &d ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getBufferMemoryRequirements2( const BufferMemoryRequirementsInfo2* pInfo, MemoryRequirements2* pMemoryRequirements, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + 
MemoryRequirements2 getBufferMemoryRequirements2( const BufferMemoryRequirementsInfo2 & info, Dispatch const &d = Dispatch() ) const; + template + StructureChain getBufferMemoryRequirements2( const BufferMemoryRequirementsInfo2 & info, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getBufferMemoryRequirements2KHR( const BufferMemoryRequirementsInfo2* pInfo, MemoryRequirements2* pMemoryRequirements, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + MemoryRequirements2 getBufferMemoryRequirements2KHR( const BufferMemoryRequirementsInfo2 & info, Dispatch const &d = Dispatch() ) const; + template + StructureChain getBufferMemoryRequirements2KHR( const BufferMemoryRequirementsInfo2 & info, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getImageMemoryRequirements2( const ImageMemoryRequirementsInfo2* pInfo, MemoryRequirements2* pMemoryRequirements, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + MemoryRequirements2 getImageMemoryRequirements2( const ImageMemoryRequirementsInfo2 & info, Dispatch const &d = Dispatch() ) const; + template + StructureChain getImageMemoryRequirements2( const ImageMemoryRequirementsInfo2 & info, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getImageMemoryRequirements2KHR( const ImageMemoryRequirementsInfo2* pInfo, MemoryRequirements2* pMemoryRequirements, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + MemoryRequirements2 getImageMemoryRequirements2KHR( const ImageMemoryRequirementsInfo2 & info, Dispatch const &d = Dispatch() ) const; + template + StructureChain getImageMemoryRequirements2KHR( const ImageMemoryRequirementsInfo2 & info, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getImageSparseMemoryRequirements2( const ImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, SparseImageMemoryRequirements2* pSparseMemoryRequirements, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = DispatchLoaderStatic> + std::vector getImageSparseMemoryRequirements2( const ImageSparseMemoryRequirementsInfo2 & info, Dispatch const &d = Dispatch() ) const; + template , typename Dispatch = DispatchLoaderStatic> + std::vector getImageSparseMemoryRequirements2( const ImageSparseMemoryRequirementsInfo2 & info, Allocator const& vectorAllocator, Dispatch const &d ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getImageSparseMemoryRequirements2KHR( const ImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, SparseImageMemoryRequirements2* pSparseMemoryRequirements, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = DispatchLoaderStatic> + std::vector getImageSparseMemoryRequirements2KHR( const ImageSparseMemoryRequirementsInfo2 & info, Dispatch const &d = Dispatch() ) const; + template , typename Dispatch = DispatchLoaderStatic> + std::vector getImageSparseMemoryRequirements2KHR( const ImageSparseMemoryRequirementsInfo2 & info, Allocator const& vectorAllocator, Dispatch const &d ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result createSamplerYcbcrConversion( const 
SamplerYcbcrConversionCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, SamplerYcbcrConversion* pYcbcrConversion, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type createSamplerYcbcrConversion( const SamplerYcbcrConversionCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + typename ResultValueType>::type createSamplerYcbcrConversionUnique( const SamplerYcbcrConversionCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result createSamplerYcbcrConversionKHR( const SamplerYcbcrConversionCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, SamplerYcbcrConversion* pYcbcrConversion, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type createSamplerYcbcrConversionKHR( const SamplerYcbcrConversionCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + typename ResultValueType>::type createSamplerYcbcrConversionKHRUnique( const SamplerYcbcrConversionCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroySamplerYcbcrConversion( SamplerYcbcrConversion ycbcrConversion, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroySamplerYcbcrConversion( SamplerYcbcrConversion ycbcrConversion, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( SamplerYcbcrConversion ycbcrConversion, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( SamplerYcbcrConversion ycbcrConversion, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroySamplerYcbcrConversionKHR( SamplerYcbcrConversion ycbcrConversion, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroySamplerYcbcrConversionKHR( SamplerYcbcrConversion ycbcrConversion, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getQueue2( const DeviceQueueInfo2* pQueueInfo, Queue* pQueue, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + Queue getQueue2( const DeviceQueueInfo2 & queueInfo, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result createValidationCacheEXT( const ValidationCacheCreateInfoEXT* pCreateInfo, const AllocationCallbacks* pAllocator, ValidationCacheEXT* pValidationCache, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type createValidationCacheEXT( const ValidationCacheCreateInfoEXT & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + typename ResultValueType>::type 
createValidationCacheEXTUnique( const ValidationCacheCreateInfoEXT & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyValidationCacheEXT( ValidationCacheEXT validationCache, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyValidationCacheEXT( ValidationCacheEXT validationCache, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( ValidationCacheEXT validationCache, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( ValidationCacheEXT validationCache, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result getValidationCacheDataEXT( ValidationCacheEXT validationCache, size_t* pDataSize, void* pData, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = DispatchLoaderStatic> + typename ResultValueType>::type getValidationCacheDataEXT( ValidationCacheEXT validationCache, Dispatch const &d = Dispatch() ) const; + template , typename Dispatch = DispatchLoaderStatic> + typename ResultValueType>::type getValidationCacheDataEXT( ValidationCacheEXT validationCache, Allocator const& vectorAllocator, Dispatch const &d ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result mergeValidationCachesEXT( ValidationCacheEXT dstCache, uint32_t srcCacheCount, const ValidationCacheEXT* pSrcCaches, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type mergeValidationCachesEXT( ValidationCacheEXT dstCache, ArrayProxy srcCaches, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getDescriptorSetLayoutSupport( const DescriptorSetLayoutCreateInfo* pCreateInfo, DescriptorSetLayoutSupport* pSupport, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + DescriptorSetLayoutSupport getDescriptorSetLayoutSupport( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d = Dispatch() ) const; + template + StructureChain getDescriptorSetLayoutSupport( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getDescriptorSetLayoutSupportKHR( const DescriptorSetLayoutCreateInfo* pCreateInfo, DescriptorSetLayoutSupport* pSupport, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + DescriptorSetLayoutSupport getDescriptorSetLayoutSupportKHR( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d = Dispatch() ) const; + template + StructureChain getDescriptorSetLayoutSupportKHR( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result getShaderInfoAMD( Pipeline pipeline, ShaderStageFlagBits shaderStage, ShaderInfoTypeAMD infoType, size_t* pInfoSize, void* pInfo, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = DispatchLoaderStatic> + typename ResultValueType>::type 
getShaderInfoAMD( Pipeline pipeline, ShaderStageFlagBits shaderStage, ShaderInfoTypeAMD infoType, Dispatch const &d = Dispatch() ) const; + template , typename Dispatch = DispatchLoaderStatic> + typename ResultValueType>::type getShaderInfoAMD( Pipeline pipeline, ShaderStageFlagBits shaderStage, ShaderInfoTypeAMD infoType, Allocator const& vectorAllocator, Dispatch const &d ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result getCalibratedTimestampsEXT( uint32_t timestampCount, const CalibratedTimestampInfoEXT* pTimestampInfos, uint64_t* pTimestamps, uint64_t* pMaxDeviation, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type getCalibratedTimestampsEXT( ArrayProxy timestampInfos, ArrayProxy timestamps, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result setDebugUtilsObjectNameEXT( const DebugUtilsObjectNameInfoEXT* pNameInfo, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type setDebugUtilsObjectNameEXT( const DebugUtilsObjectNameInfoEXT & nameInfo, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result setDebugUtilsObjectTagEXT( const DebugUtilsObjectTagInfoEXT* pTagInfo, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type setDebugUtilsObjectTagEXT( const DebugUtilsObjectTagInfoEXT & tagInfo, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result getMemoryHostPointerPropertiesEXT( ExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, MemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type getMemoryHostPointerPropertiesEXT( ExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result createRenderPass2KHR( const RenderPassCreateInfo2KHR* pCreateInfo, const AllocationCallbacks* pAllocator, RenderPass* pRenderPass, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type createRenderPass2KHR( const RenderPassCreateInfo2KHR & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + typename ResultValueType>::type createRenderPass2KHRUnique( const RenderPassCreateInfo2KHR & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VK_USE_PLATFORM_ANDROID_ANDROID + template + Result getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer* buffer, AndroidHardwareBufferPropertiesANDROID* pProperties, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const &d = Dispatch() ) const; + template + typename ResultValueType>::type getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_ANDROID_ANDROID*/ + +#ifdef 
VK_USE_PLATFORM_ANDROID_ANDROID + template + Result getMemoryAndroidHardwareBufferANDROID( const MemoryGetAndroidHardwareBufferInfoANDROID* pInfo, struct AHardwareBuffer** pBuffer, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type getMemoryAndroidHardwareBufferANDROID( const MemoryGetAndroidHardwareBufferInfoANDROID & info, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_ANDROID_ANDROID*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + Result compileDeferredNV( Pipeline pipeline, uint32_t shader, Dispatch const &d = Dispatch() ) const; +#else + template + ResultValueType::type compileDeferredNV( Pipeline pipeline, uint32_t shader, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result createAccelerationStructureNV( const AccelerationStructureCreateInfoNV* pCreateInfo, const AllocationCallbacks* pAllocator, AccelerationStructureNV* pAccelerationStructure, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type createAccelerationStructureNV( const AccelerationStructureCreateInfoNV & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + typename ResultValueType>::type createAccelerationStructureNVUnique( const AccelerationStructureCreateInfoNV & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyAccelerationStructureNV( AccelerationStructureNV accelerationStructure, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyAccelerationStructureNV( AccelerationStructureNV accelerationStructure, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( AccelerationStructureNV accelerationStructure, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( AccelerationStructureNV accelerationStructure, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getAccelerationStructureMemoryRequirementsNV( const AccelerationStructureMemoryRequirementsInfoNV* pInfo, MemoryRequirements2KHR* pMemoryRequirements, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + MemoryRequirements2KHR getAccelerationStructureMemoryRequirementsNV( const AccelerationStructureMemoryRequirementsInfoNV & info, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result bindAccelerationStructureMemoryNV( uint32_t bindInfoCount, const BindAccelerationStructureMemoryInfoNV* pBindInfos, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type bindAccelerationStructureMemoryNV( ArrayProxy bindInfos, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result getRayTracingShaderGroupHandlesNV( Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData, Dispatch const &d = Dispatch() ) const; +#ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type getRayTracingShaderGroupHandlesNV( Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, ArrayProxy data, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result getAccelerationStructureHandleNV( AccelerationStructureNV accelerationStructure, size_t dataSize, void* pData, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type getAccelerationStructureHandleNV( AccelerationStructureNV accelerationStructure, ArrayProxy data, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result createRayTracingPipelinesNV( PipelineCache pipelineCache, uint32_t createInfoCount, const RayTracingPipelineCreateInfoNV* pCreateInfos, const AllocationCallbacks* pAllocator, Pipeline* pPipelines, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = DispatchLoaderStatic> + typename ResultValueType>::type createRayTracingPipelinesNV( PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; + template , typename Dispatch = DispatchLoaderStatic> + typename ResultValueType>::type createRayTracingPipelinesNV( PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const; + template + ResultValueType::type createRayTracingPipelineNV( PipelineCache pipelineCache, const RayTracingPipelineCreateInfoNV & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template , typename Dispatch = DispatchLoaderStatic> + typename ResultValueType,Allocator>>::type createRayTracingPipelinesNVUnique( PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; + template , typename Dispatch = DispatchLoaderStatic> + typename ResultValueType,Allocator>>::type createRayTracingPipelinesNVUnique( PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const; + template + typename ResultValueType>::type createRayTracingPipelineNVUnique( PipelineCache pipelineCache, const RayTracingPipelineCreateInfoNV & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result getImageDrmFormatModifierPropertiesEXT( Image image, ImageDrmFormatModifierPropertiesEXT* pProperties, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type getImageDrmFormatModifierPropertiesEXT( Image image, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDevice() const + { + return m_device; + } + + explicit operator bool() const + { + return m_device != VK_NULL_HANDLE; + } + + bool operator!() const + { + return m_device == VK_NULL_HANDLE; + } + + private: + VkDevice m_device; + }; + + static_assert( sizeof( Device ) == sizeof( VkDevice ), "handle and wrapper have different size!" 
+  );
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE PFN_vkVoidFunction Device::getProcAddr( const char* pName, Dispatch const &d) const
+  {
+    return d.vkGetDeviceProcAddr( m_device, pName );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE PFN_vkVoidFunction Device::getProcAddr( const std::string & name, Dispatch const &d ) const
+  {
+    return d.vkGetDeviceProcAddr( m_device, name.c_str() );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( const AllocationCallbacks* pAllocator, Dispatch const &d) const
+  {
+    d.vkDestroyDevice( m_device, reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    d.vkDestroyDevice( m_device, reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, Queue* pQueue, Dispatch const &d) const
+  {
+    d.vkGetDeviceQueue( m_device, queueFamilyIndex, queueIndex, reinterpret_cast<VkQueue*>( pQueue ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Queue Device::getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, Dispatch const &d ) const
+  {
+    Queue queue;
+    d.vkGetDeviceQueue( m_device, queueFamilyIndex, queueIndex, reinterpret_cast<VkQueue*>( &queue ) );
+    return queue;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::waitIdle(Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkDeviceWaitIdle( m_device ) );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<void>::type Device::waitIdle(Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkDeviceWaitIdle( m_device ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::waitIdle" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::allocateMemory( const MemoryAllocateInfo* pAllocateInfo, const AllocationCallbacks* pAllocator, DeviceMemory* pMemory, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkAllocateMemory( m_device, reinterpret_cast<const VkMemoryAllocateInfo*>( pAllocateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkDeviceMemory*>( pMemory ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<DeviceMemory>::type Device::allocateMemory( const MemoryAllocateInfo & allocateInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    DeviceMemory memory;
+    Result result = static_cast<Result>( d.vkAllocateMemory( m_device, reinterpret_cast<const VkMemoryAllocateInfo*>( &allocateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDeviceMemory*>( &memory ) ) );
+    return createResultValue( result, memory, VULKAN_HPP_NAMESPACE_STRING"::Device::allocateMemory" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<DeviceMemory,Dispatch>>::type Device::allocateMemoryUnique( const MemoryAllocateInfo & allocateInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    DeviceMemory memory;
+    Result result = static_cast<Result>( d.vkAllocateMemory( m_device, reinterpret_cast<const VkMemoryAllocateInfo*>( &allocateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDeviceMemory*>( &memory ) ) );
+
+    ObjectFree<Device,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<DeviceMemory,Dispatch>( result, memory, VULKAN_HPP_NAMESPACE_STRING"::Device::allocateMemoryUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
VULKAN_HPP_INLINE void Device::freeMemory( DeviceMemory memory, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkFreeMemory( m_device, static_cast( memory ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::freeMemory( DeviceMemory memory, Optional allocator, Dispatch const &d ) const + { + d.vkFreeMemory( m_device, static_cast( memory ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::free( DeviceMemory memory, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkFreeMemory( m_device, static_cast( memory ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::free( DeviceMemory memory, Optional allocator, Dispatch const &d ) const + { + d.vkFreeMemory( m_device, static_cast( memory ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::mapMemory( DeviceMemory memory, DeviceSize offset, DeviceSize size, MemoryMapFlags flags, void** ppData, Dispatch const &d) const + { + return static_cast( d.vkMapMemory( m_device, static_cast( memory ), offset, size, static_cast( flags ), ppData ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::mapMemory( DeviceMemory memory, DeviceSize offset, DeviceSize size, MemoryMapFlags flags, Dispatch const &d ) const + { + void* pData; + Result result = static_cast( d.vkMapMemory( m_device, static_cast( memory ), offset, size, static_cast( flags ), &pData ) ); + return createResultValue( result, pData, VULKAN_HPP_NAMESPACE_STRING"::Device::mapMemory" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::unmapMemory( DeviceMemory memory, Dispatch const &d) const + { + d.vkUnmapMemory( m_device, static_cast( memory ) ); + } +#else + template + VULKAN_HPP_INLINE void Device::unmapMemory( DeviceMemory memory, Dispatch const &d ) const + { + d.vkUnmapMemory( m_device, static_cast( memory ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::flushMappedMemoryRanges( uint32_t memoryRangeCount, const MappedMemoryRange* pMemoryRanges, Dispatch const &d) const + { + return static_cast( d.vkFlushMappedMemoryRanges( m_device, memoryRangeCount, reinterpret_cast( pMemoryRanges ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::flushMappedMemoryRanges( ArrayProxy memoryRanges, Dispatch const &d ) const + { + Result result = static_cast( d.vkFlushMappedMemoryRanges( m_device, memoryRanges.size() , reinterpret_cast( memoryRanges.data() ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::flushMappedMemoryRanges" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::invalidateMappedMemoryRanges( uint32_t memoryRangeCount, const MappedMemoryRange* pMemoryRanges, Dispatch const &d) const + { + return static_cast( d.vkInvalidateMappedMemoryRanges( m_device, memoryRangeCount, reinterpret_cast( pMemoryRanges ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::invalidateMappedMemoryRanges( ArrayProxy memoryRanges, Dispatch const 
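/*
 * Illustrative usage sketch (not part of this patch): writing through a mapped,
 * non-coherent allocation with the enhanced-mode wrappers implemented above. Assumes
 * <vulkan/vulkan.hpp> and <cstring> are included, exceptions are enabled (so mapMemory
 * returns the pointer directly), and `memory` is a host-visible vk::DeviceMemory of at
 * least `size` bytes obtained elsewhere.
 */
void uploadSketch( vk::Device device, vk::DeviceMemory memory, const void* src, vk::DeviceSize size )
{
  void* dst = device.mapMemory( memory, 0, size, vk::MemoryMapFlags() );
  std::memcpy( dst, src, static_cast<size_t>( size ) );
  vk::MappedMemoryRange range( memory, 0, size );
  device.flushMappedMemoryRanges( range );   // ArrayProxy accepts a single element
  device.unmapMemory( memory );
}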
&d ) const + { + Result result = static_cast( d.vkInvalidateMappedMemoryRanges( m_device, memoryRanges.size() , reinterpret_cast( memoryRanges.data() ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::invalidateMappedMemoryRanges" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::getMemoryCommitment( DeviceMemory memory, DeviceSize* pCommittedMemoryInBytes, Dispatch const &d) const + { + d.vkGetDeviceMemoryCommitment( m_device, static_cast( memory ), pCommittedMemoryInBytes ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE DeviceSize Device::getMemoryCommitment( DeviceMemory memory, Dispatch const &d ) const + { + DeviceSize committedMemoryInBytes; + d.vkGetDeviceMemoryCommitment( m_device, static_cast( memory ), &committedMemoryInBytes ); + return committedMemoryInBytes; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::getBufferMemoryRequirements( Buffer buffer, MemoryRequirements* pMemoryRequirements, Dispatch const &d) const + { + d.vkGetBufferMemoryRequirements( m_device, static_cast( buffer ), reinterpret_cast( pMemoryRequirements ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE MemoryRequirements Device::getBufferMemoryRequirements( Buffer buffer, Dispatch const &d ) const + { + MemoryRequirements memoryRequirements; + d.vkGetBufferMemoryRequirements( m_device, static_cast( buffer ), reinterpret_cast( &memoryRequirements ) ); + return memoryRequirements; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE Result Device::bindBufferMemory( Buffer buffer, DeviceMemory memory, DeviceSize memoryOffset, Dispatch const &d) const + { + return static_cast( d.vkBindBufferMemory( m_device, static_cast( buffer ), static_cast( memory ), memoryOffset ) ); + } +#else + template + VULKAN_HPP_INLINE ResultValueType::type Device::bindBufferMemory( Buffer buffer, DeviceMemory memory, DeviceSize memoryOffset, Dispatch const &d ) const + { + Result result = static_cast( d.vkBindBufferMemory( m_device, static_cast( buffer ), static_cast( memory ), memoryOffset ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::bindBufferMemory" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::getImageMemoryRequirements( Image image, MemoryRequirements* pMemoryRequirements, Dispatch const &d) const + { + d.vkGetImageMemoryRequirements( m_device, static_cast( image ), reinterpret_cast( pMemoryRequirements ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE MemoryRequirements Device::getImageMemoryRequirements( Image image, Dispatch const &d ) const + { + MemoryRequirements memoryRequirements; + d.vkGetImageMemoryRequirements( m_device, static_cast( image ), reinterpret_cast( &memoryRequirements ) ); + return memoryRequirements; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE Result Device::bindImageMemory( Image image, DeviceMemory memory, DeviceSize memoryOffset, Dispatch const &d) const + { + return static_cast( d.vkBindImageMemory( m_device, static_cast( image ), static_cast( memory ), memoryOffset ) ); + } +#else + template + VULKAN_HPP_INLINE ResultValueType::type Device::bindImageMemory( Image image, DeviceMemory memory, DeviceSize memoryOffset, Dispatch const &d ) const + { + Result 
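/*
 * Illustrative usage sketch (not part of this patch): the query-allocate-bind sequence
 * using getBufferMemoryRequirements, allocateMemory and bindBufferMemory from above.
 * `findMemoryTypeSketch` is a hypothetical helper (not part of vulkan.hpp) that picks a
 * memory type index from the physical device's memory properties; `device` and `buffer`
 * are assumed to exist.
 */
uint32_t findMemoryTypeSketch( uint32_t memoryTypeBits );   // hypothetical helper, defined elsewhere

vk::DeviceMemory bindBufferSketch( vk::Device device, vk::Buffer buffer )
{
  vk::MemoryRequirements reqs = device.getBufferMemoryRequirements( buffer );
  uint32_t memoryTypeIndex   = findMemoryTypeSketch( reqs.memoryTypeBits );
  vk::DeviceMemory memory    = device.allocateMemory( vk::MemoryAllocateInfo( reqs.size, memoryTypeIndex ) );
  device.bindBufferMemory( buffer, memory, 0 );   // returns void when exceptions are enabled
  return memory;
}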
result = static_cast( d.vkBindImageMemory( m_device, static_cast( image ), static_cast( memory ), memoryOffset ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::bindImageMemory" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements( Image image, uint32_t* pSparseMemoryRequirementCount, SparseImageMemoryRequirements* pSparseMemoryRequirements, Dispatch const &d) const + { + d.vkGetImageSparseMemoryRequirements( m_device, static_cast( image ), pSparseMemoryRequirementCount, reinterpret_cast( pSparseMemoryRequirements ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE std::vector Device::getImageSparseMemoryRequirements( Image image, Dispatch const &d ) const + { + std::vector sparseMemoryRequirements; + uint32_t sparseMemoryRequirementCount; + d.vkGetImageSparseMemoryRequirements( m_device, static_cast( image ), &sparseMemoryRequirementCount, nullptr ); + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + d.vkGetImageSparseMemoryRequirements( m_device, static_cast( image ), &sparseMemoryRequirementCount, reinterpret_cast( sparseMemoryRequirements.data() ) ); + return sparseMemoryRequirements; + } + template + VULKAN_HPP_INLINE std::vector Device::getImageSparseMemoryRequirements( Image image, Allocator const& vectorAllocator, Dispatch const &d ) const + { + std::vector sparseMemoryRequirements( vectorAllocator ); + uint32_t sparseMemoryRequirementCount; + d.vkGetImageSparseMemoryRequirements( m_device, static_cast( image ), &sparseMemoryRequirementCount, nullptr ); + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + d.vkGetImageSparseMemoryRequirements( m_device, static_cast( image ), &sparseMemoryRequirementCount, reinterpret_cast( sparseMemoryRequirements.data() ) ); + return sparseMemoryRequirements; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::createFence( const FenceCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Fence* pFence, Dispatch const &d) const + { + return static_cast( d.vkCreateFence( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pFence ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::createFence( const FenceCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + { + Fence fence; + Result result = static_cast( d.vkCreateFence( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &fence ) ) ); + return createResultValue( result, fence, VULKAN_HPP_NAMESPACE_STRING"::Device::createFence" ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type Device::createFenceUnique( const FenceCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + { + Fence fence; + Result result = static_cast( d.vkCreateFence( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &fence ) ) ); + + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( result, fence, VULKAN_HPP_NAMESPACE_STRING"::Device::createFenceUnique", deleter ); + } +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroyFence( Fence fence, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { 
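/*
 * Illustrative usage sketch (not part of this patch): creating a fence with the wrapper
 * implemented above and waiting on it after a queue submission. Assumes `device`,
 * `queue` and a filled-in `submitInfo` exist; waitForFences returns vk::Result::eSuccess
 * or vk::Result::eTimeout.
 */
void submitAndWaitSketch( vk::Device device, vk::Queue queue, vk::SubmitInfo const & submitInfo )
{
  vk::Fence fence = device.createFence( vk::FenceCreateInfo() );
  queue.submit( submitInfo, fence );
  vk::Result r = device.waitForFences( fence, VK_TRUE, UINT64_MAX );
  if ( r == vk::Result::eSuccess )
  {
    device.resetFences( fence );
  }
  device.destroyFence( fence );
}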
+ d.vkDestroyFence( m_device, static_cast( fence ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyFence( Fence fence, Optional allocator, Dispatch const &d ) const + { + d.vkDestroyFence( m_device, static_cast( fence ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( Fence fence, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroyFence( m_device, static_cast( fence ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( Fence fence, Optional allocator, Dispatch const &d ) const + { + d.vkDestroyFence( m_device, static_cast( fence ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::resetFences( uint32_t fenceCount, const Fence* pFences, Dispatch const &d) const + { + return static_cast( d.vkResetFences( m_device, fenceCount, reinterpret_cast( pFences ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::resetFences( ArrayProxy fences, Dispatch const &d ) const + { + Result result = static_cast( d.vkResetFences( m_device, fences.size() , reinterpret_cast( fences.data() ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::resetFences" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE Result Device::getFenceStatus( Fence fence, Dispatch const &d) const + { + return static_cast( d.vkGetFenceStatus( m_device, static_cast( fence ) ) ); + } +#else + template + VULKAN_HPP_INLINE Result Device::getFenceStatus( Fence fence, Dispatch const &d ) const + { + Result result = static_cast( d.vkGetFenceStatus( m_device, static_cast( fence ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getFenceStatus", { Result::eSuccess, Result::eNotReady } ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::waitForFences( uint32_t fenceCount, const Fence* pFences, Bool32 waitAll, uint64_t timeout, Dispatch const &d) const + { + return static_cast( d.vkWaitForFences( m_device, fenceCount, reinterpret_cast( pFences ), waitAll, timeout ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE Result Device::waitForFences( ArrayProxy fences, Bool32 waitAll, uint64_t timeout, Dispatch const &d ) const + { + Result result = static_cast( d.vkWaitForFences( m_device, fences.size() , reinterpret_cast( fences.data() ), waitAll, timeout ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::waitForFences", { Result::eSuccess, Result::eTimeout } ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::createSemaphore( const SemaphoreCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Semaphore* pSemaphore, Dispatch const &d) const + { + return static_cast( d.vkCreateSemaphore( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSemaphore ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::createSemaphore( const SemaphoreCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + { + 
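/*
 * Illustrative usage sketch (not part of this patch): non-blocking fence polling with
 * getFenceStatus, which reports vk::Result::eSuccess or vk::Result::eNotReady (the two
 * result codes passed to createResultValue in the implementation above). `device` and
 * `fence` are assumed to come from elsewhere.
 */
bool isWorkDoneSketch( vk::Device device, vk::Fence fence )
{
  return device.getFenceStatus( fence ) == vk::Result::eSuccess;
}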
Semaphore semaphore; + Result result = static_cast( d.vkCreateSemaphore( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &semaphore ) ) ); + return createResultValue( result, semaphore, VULKAN_HPP_NAMESPACE_STRING"::Device::createSemaphore" ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type Device::createSemaphoreUnique( const SemaphoreCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + { + Semaphore semaphore; + Result result = static_cast( d.vkCreateSemaphore( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &semaphore ) ) ); + + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( result, semaphore, VULKAN_HPP_NAMESPACE_STRING"::Device::createSemaphoreUnique", deleter ); + } +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroySemaphore( Semaphore semaphore, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroySemaphore( m_device, static_cast( semaphore ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroySemaphore( Semaphore semaphore, Optional allocator, Dispatch const &d ) const + { + d.vkDestroySemaphore( m_device, static_cast( semaphore ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( Semaphore semaphore, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroySemaphore( m_device, static_cast( semaphore ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( Semaphore semaphore, Optional allocator, Dispatch const &d ) const + { + d.vkDestroySemaphore( m_device, static_cast( semaphore ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::createEvent( const EventCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Event* pEvent, Dispatch const &d) const + { + return static_cast( d.vkCreateEvent( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pEvent ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::createEvent( const EventCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + { + Event event; + Result result = static_cast( d.vkCreateEvent( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &event ) ) ); + return createResultValue( result, event, VULKAN_HPP_NAMESPACE_STRING"::Device::createEvent" ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type Device::createEventUnique( const EventCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + { + Event event; + Result result = static_cast( d.vkCreateEvent( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &event ) ) ); + + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( result, event, VULKAN_HPP_NAMESPACE_STRING"::Device::createEventUnique", deleter ); + } +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ 
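/*
 * Illustrative usage sketch (not part of this patch): per-frame semaphores created with
 * the createSemaphoreUnique overload above, so they are destroyed automatically when the
 * struct goes out of scope. Assumes `device` exists and VULKAN_HPP_NO_SMART_HANDLE is
 * not defined.
 */
struct FrameSyncSketch
{
  vk::UniqueSemaphore imageAvailable;
  vk::UniqueSemaphore renderFinished;
};

FrameSyncSketch makeFrameSyncSketch( vk::Device device )
{
  FrameSyncSketch sync;
  sync.imageAvailable = device.createSemaphoreUnique( vk::SemaphoreCreateInfo() );
  sync.renderFinished = device.createSemaphoreUnique( vk::SemaphoreCreateInfo() );
  return sync;   // UniqueSemaphore is move-only; the locals are moved out
}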
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroyEvent( Event event, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroyEvent( m_device, static_cast( event ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyEvent( Event event, Optional allocator, Dispatch const &d ) const + { + d.vkDestroyEvent( m_device, static_cast( event ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( Event event, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroyEvent( m_device, static_cast( event ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( Event event, Optional allocator, Dispatch const &d ) const + { + d.vkDestroyEvent( m_device, static_cast( event ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE Result Device::getEventStatus( Event event, Dispatch const &d) const + { + return static_cast( d.vkGetEventStatus( m_device, static_cast( event ) ) ); + } +#else + template + VULKAN_HPP_INLINE Result Device::getEventStatus( Event event, Dispatch const &d ) const + { + Result result = static_cast( d.vkGetEventStatus( m_device, static_cast( event ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getEventStatus", { Result::eEventSet, Result::eEventReset } ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE Result Device::setEvent( Event event, Dispatch const &d) const + { + return static_cast( d.vkSetEvent( m_device, static_cast( event ) ) ); + } +#else + template + VULKAN_HPP_INLINE ResultValueType::type Device::setEvent( Event event, Dispatch const &d ) const + { + Result result = static_cast( d.vkSetEvent( m_device, static_cast( event ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::setEvent" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE Result Device::resetEvent( Event event, Dispatch const &d) const + { + return static_cast( d.vkResetEvent( m_device, static_cast( event ) ) ); + } +#else + template + VULKAN_HPP_INLINE ResultValueType::type Device::resetEvent( Event event, Dispatch const &d ) const + { + Result result = static_cast( d.vkResetEvent( m_device, static_cast( event ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::resetEvent" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::createQueryPool( const QueryPoolCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, QueryPool* pQueryPool, Dispatch const &d) const + { + return static_cast( d.vkCreateQueryPool( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pQueryPool ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::createQueryPool( const QueryPoolCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + { + QueryPool queryPool; + Result result = static_cast( d.vkCreateQueryPool( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( 
static_cast( allocator ) ), reinterpret_cast( &queryPool ) ) ); + return createResultValue( result, queryPool, VULKAN_HPP_NAMESPACE_STRING"::Device::createQueryPool" ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type Device::createQueryPoolUnique( const QueryPoolCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + { + QueryPool queryPool; + Result result = static_cast( d.vkCreateQueryPool( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &queryPool ) ) ); + + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( result, queryPool, VULKAN_HPP_NAMESPACE_STRING"::Device::createQueryPoolUnique", deleter ); + } +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroyQueryPool( QueryPool queryPool, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroyQueryPool( m_device, static_cast( queryPool ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyQueryPool( QueryPool queryPool, Optional allocator, Dispatch const &d ) const + { + d.vkDestroyQueryPool( m_device, static_cast( queryPool ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( QueryPool queryPool, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroyQueryPool( m_device, static_cast( queryPool ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( QueryPool queryPool, Optional allocator, Dispatch const &d ) const + { + d.vkDestroyQueryPool( m_device, static_cast( queryPool ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::getQueryPoolResults( QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void* pData, DeviceSize stride, QueryResultFlags flags, Dispatch const &d) const + { + return static_cast( d.vkGetQueryPoolResults( m_device, static_cast( queryPool ), firstQuery, queryCount, dataSize, pData, stride, static_cast( flags ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE Result Device::getQueryPoolResults( QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, ArrayProxy data, DeviceSize stride, QueryResultFlags flags, Dispatch const &d ) const + { + Result result = static_cast( d.vkGetQueryPoolResults( m_device, static_cast( queryPool ), firstQuery, queryCount, data.size() * sizeof( T ) , reinterpret_cast( data.data() ), stride, static_cast( flags ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getQueryPoolResults", { Result::eSuccess, Result::eNotReady } ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::createBuffer( const BufferCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Buffer* pBuffer, Dispatch const &d) const + { + return static_cast( d.vkCreateBuffer( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pBuffer ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::createBuffer( const BufferCreateInfo & createInfo, Optional 
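/*
 * Illustrative usage sketch (not part of this patch): reading timestamp queries back
 * through the C-style getQueryPoolResults overload above. Assumes `device` exists and
 * the two queries were written by previously submitted command buffers; the returned
 * vk::Result is eSuccess, or eNotReady when eWait is not requested.
 */
void readTimestampsSketch( vk::Device device )
{
  vk::UniqueQueryPool pool = device.createQueryPoolUnique(
    vk::QueryPoolCreateInfo( vk::QueryPoolCreateFlags(), vk::QueryType::eTimestamp, 2 ) );
  // ... record vkCmdWriteTimestamp into a command buffer and submit it here ...
  uint64_t timestamps[2] = {};
  vk::Result r = device.getQueryPoolResults( *pool, 0, 2, sizeof( timestamps ), timestamps,
                                             sizeof( uint64_t ),
                                             vk::QueryResultFlagBits::e64 | vk::QueryResultFlagBits::eWait );
  (void)r;
}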
allocator, Dispatch const &d ) const + { + Buffer buffer; + Result result = static_cast( d.vkCreateBuffer( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &buffer ) ) ); + return createResultValue( result, buffer, VULKAN_HPP_NAMESPACE_STRING"::Device::createBuffer" ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type Device::createBufferUnique( const BufferCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + { + Buffer buffer; + Result result = static_cast( d.vkCreateBuffer( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &buffer ) ) ); + + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( result, buffer, VULKAN_HPP_NAMESPACE_STRING"::Device::createBufferUnique", deleter ); + } +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroyBuffer( Buffer buffer, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroyBuffer( m_device, static_cast( buffer ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyBuffer( Buffer buffer, Optional allocator, Dispatch const &d ) const + { + d.vkDestroyBuffer( m_device, static_cast( buffer ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( Buffer buffer, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroyBuffer( m_device, static_cast( buffer ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( Buffer buffer, Optional allocator, Dispatch const &d ) const + { + d.vkDestroyBuffer( m_device, static_cast( buffer ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::createBufferView( const BufferViewCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, BufferView* pView, Dispatch const &d) const + { + return static_cast( d.vkCreateBufferView( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pView ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::createBufferView( const BufferViewCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + { + BufferView view; + Result result = static_cast( d.vkCreateBufferView( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &view ) ) ); + return createResultValue( result, view, VULKAN_HPP_NAMESPACE_STRING"::Device::createBufferView" ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type Device::createBufferViewUnique( const BufferViewCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + { + BufferView view; + Result result = static_cast( d.vkCreateBufferView( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &view ) ) ); + + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( result, view, VULKAN_HPP_NAMESPACE_STRING"::Device::createBufferViewUnique", deleter ); + } +#endif 
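/*
 * Illustrative usage sketch (not part of this patch): creating a buffer through the
 * createBufferUnique overload above. The usage flags and sharing mode are arbitrary
 * example values; `device` is assumed to exist.
 */
vk::UniqueBuffer makeStagingBufferSketch( vk::Device device, vk::DeviceSize size )
{
  vk::BufferCreateInfo info( vk::BufferCreateFlags(), size,
                             vk::BufferUsageFlagBits::eTransferSrc,
                             vk::SharingMode::eExclusive );
  return device.createBufferUnique( info );   // freed automatically via vkDestroyBuffer
}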
/*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroyBufferView( BufferView bufferView, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroyBufferView( m_device, static_cast( bufferView ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyBufferView( BufferView bufferView, Optional allocator, Dispatch const &d ) const + { + d.vkDestroyBufferView( m_device, static_cast( bufferView ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( BufferView bufferView, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroyBufferView( m_device, static_cast( bufferView ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( BufferView bufferView, Optional allocator, Dispatch const &d ) const + { + d.vkDestroyBufferView( m_device, static_cast( bufferView ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::createImage( const ImageCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Image* pImage, Dispatch const &d) const + { + return static_cast( d.vkCreateImage( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pImage ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::createImage( const ImageCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + { + Image image; + Result result = static_cast( d.vkCreateImage( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &image ) ) ); + return createResultValue( result, image, VULKAN_HPP_NAMESPACE_STRING"::Device::createImage" ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type Device::createImageUnique( const ImageCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + { + Image image; + Result result = static_cast( d.vkCreateImage( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &image ) ) ); + + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( result, image, VULKAN_HPP_NAMESPACE_STRING"::Device::createImageUnique", deleter ); + } +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroyImage( Image image, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroyImage( m_device, static_cast( image ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyImage( Image image, Optional allocator, Dispatch const &d ) const + { + d.vkDestroyImage( m_device, static_cast( image ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( Image image, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroyImage( m_device, static_cast( image ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void 
Device::destroy( Image image, Optional allocator, Dispatch const &d ) const + { + d.vkDestroyImage( m_device, static_cast( image ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::getImageSubresourceLayout( Image image, const ImageSubresource* pSubresource, SubresourceLayout* pLayout, Dispatch const &d) const + { + d.vkGetImageSubresourceLayout( m_device, static_cast( image ), reinterpret_cast( pSubresource ), reinterpret_cast( pLayout ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE SubresourceLayout Device::getImageSubresourceLayout( Image image, const ImageSubresource & subresource, Dispatch const &d ) const + { + SubresourceLayout layout; + d.vkGetImageSubresourceLayout( m_device, static_cast( image ), reinterpret_cast( &subresource ), reinterpret_cast( &layout ) ); + return layout; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::createImageView( const ImageViewCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, ImageView* pView, Dispatch const &d) const + { + return static_cast( d.vkCreateImageView( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pView ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::createImageView( const ImageViewCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + { + ImageView view; + Result result = static_cast( d.vkCreateImageView( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &view ) ) ); + return createResultValue( result, view, VULKAN_HPP_NAMESPACE_STRING"::Device::createImageView" ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type Device::createImageViewUnique( const ImageViewCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + { + ImageView view; + Result result = static_cast( d.vkCreateImageView( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &view ) ) ); + + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( result, view, VULKAN_HPP_NAMESPACE_STRING"::Device::createImageViewUnique", deleter ); + } +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroyImageView( ImageView imageView, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroyImageView( m_device, static_cast( imageView ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyImageView( ImageView imageView, Optional allocator, Dispatch const &d ) const + { + d.vkDestroyImageView( m_device, static_cast( imageView ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( ImageView imageView, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroyImageView( m_device, static_cast( imageView ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( ImageView imageView, Optional allocator, Dispatch const &d ) const + { + d.vkDestroyImageView( m_device, static_cast( imageView ), reinterpret_cast( 
static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::createShaderModule( const ShaderModuleCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, ShaderModule* pShaderModule, Dispatch const &d) const + { + return static_cast( d.vkCreateShaderModule( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pShaderModule ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::createShaderModule( const ShaderModuleCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + { + ShaderModule shaderModule; + Result result = static_cast( d.vkCreateShaderModule( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &shaderModule ) ) ); + return createResultValue( result, shaderModule, VULKAN_HPP_NAMESPACE_STRING"::Device::createShaderModule" ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type Device::createShaderModuleUnique( const ShaderModuleCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + { + ShaderModule shaderModule; + Result result = static_cast( d.vkCreateShaderModule( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &shaderModule ) ) ); + + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( result, shaderModule, VULKAN_HPP_NAMESPACE_STRING"::Device::createShaderModuleUnique", deleter ); + } +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroyShaderModule( ShaderModule shaderModule, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroyShaderModule( m_device, static_cast( shaderModule ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyShaderModule( ShaderModule shaderModule, Optional allocator, Dispatch const &d ) const + { + d.vkDestroyShaderModule( m_device, static_cast( shaderModule ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( ShaderModule shaderModule, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroyShaderModule( m_device, static_cast( shaderModule ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( ShaderModule shaderModule, Optional allocator, Dispatch const &d ) const + { + d.vkDestroyShaderModule( m_device, static_cast( shaderModule ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::createPipelineCache( const PipelineCacheCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, PipelineCache* pPipelineCache, Dispatch const &d) const + { + return static_cast( d.vkCreatePipelineCache( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pPipelineCache ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::createPipelineCache( const PipelineCacheCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + { + PipelineCache pipelineCache; + Result result = 
static_cast( d.vkCreatePipelineCache( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &pipelineCache ) ) ); + return createResultValue( result, pipelineCache, VULKAN_HPP_NAMESPACE_STRING"::Device::createPipelineCache" ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type Device::createPipelineCacheUnique( const PipelineCacheCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + { + PipelineCache pipelineCache; + Result result = static_cast( d.vkCreatePipelineCache( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &pipelineCache ) ) ); + + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( result, pipelineCache, VULKAN_HPP_NAMESPACE_STRING"::Device::createPipelineCacheUnique", deleter ); + } +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroyPipelineCache( PipelineCache pipelineCache, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroyPipelineCache( m_device, static_cast( pipelineCache ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyPipelineCache( PipelineCache pipelineCache, Optional allocator, Dispatch const &d ) const + { + d.vkDestroyPipelineCache( m_device, static_cast( pipelineCache ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( PipelineCache pipelineCache, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroyPipelineCache( m_device, static_cast( pipelineCache ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( PipelineCache pipelineCache, Optional allocator, Dispatch const &d ) const + { + d.vkDestroyPipelineCache( m_device, static_cast( pipelineCache ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::getPipelineCacheData( PipelineCache pipelineCache, size_t* pDataSize, void* pData, Dispatch const &d) const + { + return static_cast( d.vkGetPipelineCacheData( m_device, static_cast( pipelineCache ), pDataSize, pData ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType>::type Device::getPipelineCacheData( PipelineCache pipelineCache, Dispatch const &d ) const + { + std::vector data; + size_t dataSize; + Result result; + do + { + result = static_cast( d.vkGetPipelineCacheData( m_device, static_cast( pipelineCache ), &dataSize, nullptr ) ); + if ( ( result == Result::eSuccess ) && dataSize ) + { + data.resize( dataSize ); + result = static_cast( d.vkGetPipelineCacheData( m_device, static_cast( pipelineCache ), &dataSize, reinterpret_cast( data.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + VULKAN_HPP_ASSERT( dataSize <= data.size() ); + data.resize( dataSize ); + return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING"::Device::getPipelineCacheData" ); + } + template + VULKAN_HPP_INLINE typename ResultValueType>::type Device::getPipelineCacheData( PipelineCache pipelineCache, Allocator const& vectorAllocator, Dispatch const &d ) const + { + std::vector data( vectorAllocator ); + size_t 
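/*
 * Illustrative usage sketch (not part of this patch): persisting a pipeline cache with
 * the enhanced getPipelineCacheData overload above, which performs the size-query /
 * fetch loop internally and returns a std::vector<uint8_t>. Assumes `device` and `cache`
 * exist and <fstream> is included; the file name is an arbitrary example.
 */
void savePipelineCacheSketch( vk::Device device, vk::PipelineCache cache )
{
  std::vector<uint8_t> blob = device.getPipelineCacheData( cache );
  std::ofstream file( "pipeline_cache.bin", std::ios::binary );
  file.write( reinterpret_cast<const char*>( blob.data() ), static_cast<std::streamsize>( blob.size() ) );
}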
dataSize; + Result result; + do + { + result = static_cast( d.vkGetPipelineCacheData( m_device, static_cast( pipelineCache ), &dataSize, nullptr ) ); + if ( ( result == Result::eSuccess ) && dataSize ) + { + data.resize( dataSize ); + result = static_cast( d.vkGetPipelineCacheData( m_device, static_cast( pipelineCache ), &dataSize, reinterpret_cast( data.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + VULKAN_HPP_ASSERT( dataSize <= data.size() ); + data.resize( dataSize ); + return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING"::Device::getPipelineCacheData" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::mergePipelineCaches( PipelineCache dstCache, uint32_t srcCacheCount, const PipelineCache* pSrcCaches, Dispatch const &d) const + { + return static_cast( d.vkMergePipelineCaches( m_device, static_cast( dstCache ), srcCacheCount, reinterpret_cast( pSrcCaches ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::mergePipelineCaches( PipelineCache dstCache, ArrayProxy srcCaches, Dispatch const &d ) const + { + Result result = static_cast( d.vkMergePipelineCaches( m_device, static_cast( dstCache ), srcCaches.size() , reinterpret_cast( srcCaches.data() ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::mergePipelineCaches" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::createGraphicsPipelines( PipelineCache pipelineCache, uint32_t createInfoCount, const GraphicsPipelineCreateInfo* pCreateInfos, const AllocationCallbacks* pAllocator, Pipeline* pPipelines, Dispatch const &d) const + { + return static_cast( d.vkCreateGraphicsPipelines( m_device, static_cast( pipelineCache ), createInfoCount, reinterpret_cast( pCreateInfos ), reinterpret_cast( pAllocator ), reinterpret_cast( pPipelines ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType>::type Device::createGraphicsPipelines( PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Dispatch const &d ) const + { + std::vector pipelines( createInfos.size() ); + Result result = static_cast( d.vkCreateGraphicsPipelines( m_device, static_cast( pipelineCache ), createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( pipelines.data() ) ) ); + return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING"::Device::createGraphicsPipelines" ); + } + template + VULKAN_HPP_INLINE typename ResultValueType>::type Device::createGraphicsPipelines( PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const + { + std::vector pipelines( createInfos.size(), vectorAllocator ); + Result result = static_cast( d.vkCreateGraphicsPipelines( m_device, static_cast( pipelineCache ), createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( pipelines.data() ) ) ); + return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING"::Device::createGraphicsPipelines" ); + } + template + VULKAN_HPP_INLINE ResultValueType::type Device::createGraphicsPipeline( PipelineCache pipelineCache, const GraphicsPipelineCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + { + Pipeline pipeline; + Result result = static_cast( d.vkCreateGraphicsPipelines( 
m_device, static_cast( pipelineCache ), 1 , reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &pipeline ) ) ); + return createResultValue( result, pipeline, VULKAN_HPP_NAMESPACE_STRING"::Device::createGraphicsPipeline" ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType,Allocator>>::type Device::createGraphicsPipelinesUnique( PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Dispatch const &d ) const + { + static_assert( sizeof( Pipeline ) <= sizeof( UniquePipeline ), "Pipeline is greater than UniquePipeline!" ); + std::vector pipelines; + pipelines.reserve( createInfos.size() ); + Pipeline* buffer = reinterpret_cast( reinterpret_cast( pipelines.data() ) + createInfos.size() * ( sizeof( UniquePipeline ) - sizeof( Pipeline ) ) ); + Result result = static_cast(d.vkCreateGraphicsPipelines( m_device, static_cast( pipelineCache ), createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( buffer ) ) ); + + ObjectDestroy deleter( *this, allocator, d ); + for ( size_t i=0 ; i + VULKAN_HPP_INLINE typename ResultValueType,Allocator>>::type Device::createGraphicsPipelinesUnique( PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const + { + static_assert( sizeof( Pipeline ) <= sizeof( UniquePipeline ), "Pipeline is greater than UniquePipeline!" ); + std::vector pipelines; + pipelines.reserve( createInfos.size() ); + Pipeline* buffer = reinterpret_cast( reinterpret_cast( pipelines.data() ) + createInfos.size() * ( sizeof( UniquePipeline ) - sizeof( Pipeline ) ) ); + Result result = static_cast(d.vkCreateGraphicsPipelines( m_device, static_cast( pipelineCache ), createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( buffer ) ) ); + + ObjectDestroy deleter( *this, allocator, d ); + for ( size_t i=0 ; i + VULKAN_HPP_INLINE typename ResultValueType>::type Device::createGraphicsPipelineUnique( PipelineCache pipelineCache, const GraphicsPipelineCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + { + Pipeline pipeline; + Result result = static_cast( d.vkCreateGraphicsPipelines( m_device, static_cast( pipelineCache ), 1 , reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &pipeline ) ) ); + + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( result, pipeline, VULKAN_HPP_NAMESPACE_STRING"::Device::createGraphicsPipelineUnique", deleter ); + } +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::createComputePipelines( PipelineCache pipelineCache, uint32_t createInfoCount, const ComputePipelineCreateInfo* pCreateInfos, const AllocationCallbacks* pAllocator, Pipeline* pPipelines, Dispatch const &d) const + { + return static_cast( d.vkCreateComputePipelines( m_device, static_cast( pipelineCache ), createInfoCount, reinterpret_cast( pCreateInfos ), reinterpret_cast( pAllocator ), reinterpret_cast( pPipelines ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType>::type Device::createComputePipelines( PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Dispatch const &d ) const + { + std::vector pipelines( createInfos.size() ); + Result result = 
static_cast( d.vkCreateComputePipelines( m_device, static_cast( pipelineCache ), createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( pipelines.data() ) ) ); + return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING"::Device::createComputePipelines" ); + } + template + VULKAN_HPP_INLINE typename ResultValueType>::type Device::createComputePipelines( PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const + { + std::vector pipelines( createInfos.size(), vectorAllocator ); + Result result = static_cast( d.vkCreateComputePipelines( m_device, static_cast( pipelineCache ), createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( pipelines.data() ) ) ); + return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING"::Device::createComputePipelines" ); + } + template + VULKAN_HPP_INLINE ResultValueType::type Device::createComputePipeline( PipelineCache pipelineCache, const ComputePipelineCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + { + Pipeline pipeline; + Result result = static_cast( d.vkCreateComputePipelines( m_device, static_cast( pipelineCache ), 1 , reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &pipeline ) ) ); + return createResultValue( result, pipeline, VULKAN_HPP_NAMESPACE_STRING"::Device::createComputePipeline" ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType,Allocator>>::type Device::createComputePipelinesUnique( PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Dispatch const &d ) const + { + static_assert( sizeof( Pipeline ) <= sizeof( UniquePipeline ), "Pipeline is greater than UniquePipeline!" ); + std::vector pipelines; + pipelines.reserve( createInfos.size() ); + Pipeline* buffer = reinterpret_cast( reinterpret_cast( pipelines.data() ) + createInfos.size() * ( sizeof( UniquePipeline ) - sizeof( Pipeline ) ) ); + Result result = static_cast(d.vkCreateComputePipelines( m_device, static_cast( pipelineCache ), createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( buffer ) ) ); + + ObjectDestroy deleter( *this, allocator, d ); + for ( size_t i=0 ; i + VULKAN_HPP_INLINE typename ResultValueType,Allocator>>::type Device::createComputePipelinesUnique( PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const + { + static_assert( sizeof( Pipeline ) <= sizeof( UniquePipeline ), "Pipeline is greater than UniquePipeline!" 
); + std::vector pipelines; + pipelines.reserve( createInfos.size() ); + Pipeline* buffer = reinterpret_cast( reinterpret_cast( pipelines.data() ) + createInfos.size() * ( sizeof( UniquePipeline ) - sizeof( Pipeline ) ) ); + Result result = static_cast(d.vkCreateComputePipelines( m_device, static_cast( pipelineCache ), createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( buffer ) ) ); + + ObjectDestroy deleter( *this, allocator, d ); + for ( size_t i=0 ; i + VULKAN_HPP_INLINE typename ResultValueType>::type Device::createComputePipelineUnique( PipelineCache pipelineCache, const ComputePipelineCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + { + Pipeline pipeline; + Result result = static_cast( d.vkCreateComputePipelines( m_device, static_cast( pipelineCache ), 1 , reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &pipeline ) ) ); + + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( result, pipeline, VULKAN_HPP_NAMESPACE_STRING"::Device::createComputePipelineUnique", deleter ); + } +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroyPipeline( Pipeline pipeline, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroyPipeline( m_device, static_cast( pipeline ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyPipeline( Pipeline pipeline, Optional allocator, Dispatch const &d ) const + { + d.vkDestroyPipeline( m_device, static_cast( pipeline ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( Pipeline pipeline, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroyPipeline( m_device, static_cast( pipeline ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( Pipeline pipeline, Optional allocator, Dispatch const &d ) const + { + d.vkDestroyPipeline( m_device, static_cast( pipeline ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::createPipelineLayout( const PipelineLayoutCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, PipelineLayout* pPipelineLayout, Dispatch const &d) const + { + return static_cast( d.vkCreatePipelineLayout( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pPipelineLayout ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::createPipelineLayout( const PipelineLayoutCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + { + PipelineLayout pipelineLayout; + Result result = static_cast( d.vkCreatePipelineLayout( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &pipelineLayout ) ) ); + return createResultValue( result, pipelineLayout, VULKAN_HPP_NAMESPACE_STRING"::Device::createPipelineLayout" ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type Device::createPipelineLayoutUnique( const PipelineLayoutCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + { + 
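+    // Editorial note: every create*Unique wrapper in this header follows the same pattern -- create
+    // the raw handle first, then pass it to createResultValue together with an ObjectDestroy deleter,
+    // so the returned UniqueHandle destroys it with the same allocator and dispatcher on scope exit.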
PipelineLayout pipelineLayout; + Result result = static_cast( d.vkCreatePipelineLayout( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &pipelineLayout ) ) ); + + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( result, pipelineLayout, VULKAN_HPP_NAMESPACE_STRING"::Device::createPipelineLayoutUnique", deleter ); + } +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroyPipelineLayout( PipelineLayout pipelineLayout, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroyPipelineLayout( m_device, static_cast( pipelineLayout ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyPipelineLayout( PipelineLayout pipelineLayout, Optional allocator, Dispatch const &d ) const + { + d.vkDestroyPipelineLayout( m_device, static_cast( pipelineLayout ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( PipelineLayout pipelineLayout, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroyPipelineLayout( m_device, static_cast( pipelineLayout ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( PipelineLayout pipelineLayout, Optional allocator, Dispatch const &d ) const + { + d.vkDestroyPipelineLayout( m_device, static_cast( pipelineLayout ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::createSampler( const SamplerCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Sampler* pSampler, Dispatch const &d) const + { + return static_cast( d.vkCreateSampler( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSampler ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::createSampler( const SamplerCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + { + Sampler sampler; + Result result = static_cast( d.vkCreateSampler( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &sampler ) ) ); + return createResultValue( result, sampler, VULKAN_HPP_NAMESPACE_STRING"::Device::createSampler" ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type Device::createSamplerUnique( const SamplerCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + { + Sampler sampler; + Result result = static_cast( d.vkCreateSampler( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &sampler ) ) ); + + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( result, sampler, VULKAN_HPP_NAMESPACE_STRING"::Device::createSamplerUnique", deleter ); + } +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroySampler( Sampler sampler, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroySampler( m_device, static_cast( sampler ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void 
Device::destroySampler( Sampler sampler, Optional allocator, Dispatch const &d ) const + { + d.vkDestroySampler( m_device, static_cast( sampler ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( Sampler sampler, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroySampler( m_device, static_cast( sampler ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( Sampler sampler, Optional allocator, Dispatch const &d ) const + { + d.vkDestroySampler( m_device, static_cast( sampler ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::createDescriptorSetLayout( const DescriptorSetLayoutCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, DescriptorSetLayout* pSetLayout, Dispatch const &d) const + { + return static_cast( d.vkCreateDescriptorSetLayout( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSetLayout ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::createDescriptorSetLayout( const DescriptorSetLayoutCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + { + DescriptorSetLayout setLayout; + Result result = static_cast( d.vkCreateDescriptorSetLayout( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &setLayout ) ) ); + return createResultValue( result, setLayout, VULKAN_HPP_NAMESPACE_STRING"::Device::createDescriptorSetLayout" ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type Device::createDescriptorSetLayoutUnique( const DescriptorSetLayoutCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + { + DescriptorSetLayout setLayout; + Result result = static_cast( d.vkCreateDescriptorSetLayout( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &setLayout ) ) ); + + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( result, setLayout, VULKAN_HPP_NAMESPACE_STRING"::Device::createDescriptorSetLayoutUnique", deleter ); + } +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroyDescriptorSetLayout( DescriptorSetLayout descriptorSetLayout, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroyDescriptorSetLayout( m_device, static_cast( descriptorSetLayout ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyDescriptorSetLayout( DescriptorSetLayout descriptorSetLayout, Optional allocator, Dispatch const &d ) const + { + d.vkDestroyDescriptorSetLayout( m_device, static_cast( descriptorSetLayout ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( DescriptorSetLayout descriptorSetLayout, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroyDescriptorSetLayout( m_device, static_cast( descriptorSetLayout ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( 
DescriptorSetLayout descriptorSetLayout, Optional allocator, Dispatch const &d ) const + { + d.vkDestroyDescriptorSetLayout( m_device, static_cast( descriptorSetLayout ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::createDescriptorPool( const DescriptorPoolCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, DescriptorPool* pDescriptorPool, Dispatch const &d) const + { + return static_cast( d.vkCreateDescriptorPool( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pDescriptorPool ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::createDescriptorPool( const DescriptorPoolCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + { + DescriptorPool descriptorPool; + Result result = static_cast( d.vkCreateDescriptorPool( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &descriptorPool ) ) ); + return createResultValue( result, descriptorPool, VULKAN_HPP_NAMESPACE_STRING"::Device::createDescriptorPool" ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type Device::createDescriptorPoolUnique( const DescriptorPoolCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + { + DescriptorPool descriptorPool; + Result result = static_cast( d.vkCreateDescriptorPool( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &descriptorPool ) ) ); + + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( result, descriptorPool, VULKAN_HPP_NAMESPACE_STRING"::Device::createDescriptorPoolUnique", deleter ); + } +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroyDescriptorPool( DescriptorPool descriptorPool, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroyDescriptorPool( m_device, static_cast( descriptorPool ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyDescriptorPool( DescriptorPool descriptorPool, Optional allocator, Dispatch const &d ) const + { + d.vkDestroyDescriptorPool( m_device, static_cast( descriptorPool ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( DescriptorPool descriptorPool, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroyDescriptorPool( m_device, static_cast( descriptorPool ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( DescriptorPool descriptorPool, Optional allocator, Dispatch const &d ) const + { + d.vkDestroyDescriptorPool( m_device, static_cast( descriptorPool ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE Result Device::resetDescriptorPool( DescriptorPool descriptorPool, DescriptorPoolResetFlags flags, Dispatch const &d) const + { + return static_cast( d.vkResetDescriptorPool( m_device, static_cast( descriptorPool ), static_cast( flags ) ) ); + } +#else + template + VULKAN_HPP_INLINE ResultValueType::type 
Device::resetDescriptorPool( DescriptorPool descriptorPool, DescriptorPoolResetFlags flags, Dispatch const &d ) const + { + Result result = static_cast( d.vkResetDescriptorPool( m_device, static_cast( descriptorPool ), static_cast( flags ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::resetDescriptorPool" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::allocateDescriptorSets( const DescriptorSetAllocateInfo* pAllocateInfo, DescriptorSet* pDescriptorSets, Dispatch const &d) const + { + return static_cast( d.vkAllocateDescriptorSets( m_device, reinterpret_cast( pAllocateInfo ), reinterpret_cast( pDescriptorSets ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType>::type Device::allocateDescriptorSets( const DescriptorSetAllocateInfo & allocateInfo, Dispatch const &d ) const + { + std::vector descriptorSets( allocateInfo.descriptorSetCount ); + Result result = static_cast( d.vkAllocateDescriptorSets( m_device, reinterpret_cast( &allocateInfo ), reinterpret_cast( descriptorSets.data() ) ) ); + return createResultValue( result, descriptorSets, VULKAN_HPP_NAMESPACE_STRING"::Device::allocateDescriptorSets" ); + } + template + VULKAN_HPP_INLINE typename ResultValueType>::type Device::allocateDescriptorSets( const DescriptorSetAllocateInfo & allocateInfo, Allocator const& vectorAllocator, Dispatch const &d ) const + { + std::vector descriptorSets( allocateInfo.descriptorSetCount, vectorAllocator ); + Result result = static_cast( d.vkAllocateDescriptorSets( m_device, reinterpret_cast( &allocateInfo ), reinterpret_cast( descriptorSets.data() ) ) ); + return createResultValue( result, descriptorSets, VULKAN_HPP_NAMESPACE_STRING"::Device::allocateDescriptorSets" ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType,Allocator>>::type Device::allocateDescriptorSetsUnique( const DescriptorSetAllocateInfo & allocateInfo, Dispatch const &d ) const + { + static_assert( sizeof( DescriptorSet ) <= sizeof( UniqueDescriptorSet ), "DescriptorSet is greater than UniqueDescriptorSet!" ); + std::vector descriptorSets; + descriptorSets.reserve( allocateInfo.descriptorSetCount ); + DescriptorSet* buffer = reinterpret_cast( reinterpret_cast( descriptorSets.data() ) + allocateInfo.descriptorSetCount * ( sizeof( UniqueDescriptorSet ) - sizeof( DescriptorSet ) ) ); + Result result = static_cast(d.vkAllocateDescriptorSets( m_device, reinterpret_cast( &allocateInfo ), reinterpret_cast( buffer ) ) ); + + PoolFree deleter( *this, allocateInfo.descriptorPool, d ); + for ( size_t i=0 ; i + VULKAN_HPP_INLINE typename ResultValueType,Allocator>>::type Device::allocateDescriptorSetsUnique( const DescriptorSetAllocateInfo & allocateInfo, Allocator const& vectorAllocator, Dispatch const &d ) const + { + static_assert( sizeof( DescriptorSet ) <= sizeof( UniqueDescriptorSet ), "DescriptorSet is greater than UniqueDescriptorSet!" 
); + std::vector descriptorSets; + descriptorSets.reserve( allocateInfo.descriptorSetCount ); + DescriptorSet* buffer = reinterpret_cast( reinterpret_cast( descriptorSets.data() ) + allocateInfo.descriptorSetCount * ( sizeof( UniqueDescriptorSet ) - sizeof( DescriptorSet ) ) ); + Result result = static_cast(d.vkAllocateDescriptorSets( m_device, reinterpret_cast( &allocateInfo ), reinterpret_cast( buffer ) ) ); + + PoolFree deleter( *this, allocateInfo.descriptorPool, d ); + for ( size_t i=0 ; i + VULKAN_HPP_INLINE Result Device::freeDescriptorSets( DescriptorPool descriptorPool, uint32_t descriptorSetCount, const DescriptorSet* pDescriptorSets, Dispatch const &d) const + { + return static_cast( d.vkFreeDescriptorSets( m_device, static_cast( descriptorPool ), descriptorSetCount, reinterpret_cast( pDescriptorSets ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::freeDescriptorSets( DescriptorPool descriptorPool, ArrayProxy descriptorSets, Dispatch const &d ) const + { + Result result = static_cast( d.vkFreeDescriptorSets( m_device, static_cast( descriptorPool ), descriptorSets.size() , reinterpret_cast( descriptorSets.data() ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::freeDescriptorSets" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::free( DescriptorPool descriptorPool, uint32_t descriptorSetCount, const DescriptorSet* pDescriptorSets, Dispatch const &d) const + { + return static_cast( d.vkFreeDescriptorSets( m_device, static_cast( descriptorPool ), descriptorSetCount, reinterpret_cast( pDescriptorSets ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::free( DescriptorPool descriptorPool, ArrayProxy descriptorSets, Dispatch const &d ) const + { + Result result = static_cast( d.vkFreeDescriptorSets( m_device, static_cast( descriptorPool ), descriptorSets.size() , reinterpret_cast( descriptorSets.data() ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::free" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::updateDescriptorSets( uint32_t descriptorWriteCount, const WriteDescriptorSet* pDescriptorWrites, uint32_t descriptorCopyCount, const CopyDescriptorSet* pDescriptorCopies, Dispatch const &d) const + { + d.vkUpdateDescriptorSets( m_device, descriptorWriteCount, reinterpret_cast( pDescriptorWrites ), descriptorCopyCount, reinterpret_cast( pDescriptorCopies ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::updateDescriptorSets( ArrayProxy descriptorWrites, ArrayProxy descriptorCopies, Dispatch const &d ) const + { + d.vkUpdateDescriptorSets( m_device, descriptorWrites.size() , reinterpret_cast( descriptorWrites.data() ), descriptorCopies.size() , reinterpret_cast( descriptorCopies.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::createFramebuffer( const FramebufferCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Framebuffer* pFramebuffer, Dispatch const &d) const + { + return static_cast( d.vkCreateFramebuffer( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pFramebuffer ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::createFramebuffer( const FramebufferCreateInfo & 
createInfo, Optional allocator, Dispatch const &d ) const + { + Framebuffer framebuffer; + Result result = static_cast( d.vkCreateFramebuffer( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &framebuffer ) ) ); + return createResultValue( result, framebuffer, VULKAN_HPP_NAMESPACE_STRING"::Device::createFramebuffer" ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type Device::createFramebufferUnique( const FramebufferCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + { + Framebuffer framebuffer; + Result result = static_cast( d.vkCreateFramebuffer( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &framebuffer ) ) ); + + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( result, framebuffer, VULKAN_HPP_NAMESPACE_STRING"::Device::createFramebufferUnique", deleter ); + } +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroyFramebuffer( Framebuffer framebuffer, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroyFramebuffer( m_device, static_cast( framebuffer ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyFramebuffer( Framebuffer framebuffer, Optional allocator, Dispatch const &d ) const + { + d.vkDestroyFramebuffer( m_device, static_cast( framebuffer ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( Framebuffer framebuffer, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroyFramebuffer( m_device, static_cast( framebuffer ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( Framebuffer framebuffer, Optional allocator, Dispatch const &d ) const + { + d.vkDestroyFramebuffer( m_device, static_cast( framebuffer ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::createRenderPass( const RenderPassCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, RenderPass* pRenderPass, Dispatch const &d) const + { + return static_cast( d.vkCreateRenderPass( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pRenderPass ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::createRenderPass( const RenderPassCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + { + RenderPass renderPass; + Result result = static_cast( d.vkCreateRenderPass( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &renderPass ) ) ); + return createResultValue( result, renderPass, VULKAN_HPP_NAMESPACE_STRING"::Device::createRenderPass" ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type Device::createRenderPassUnique( const RenderPassCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + { + RenderPass renderPass; + Result result = static_cast( d.vkCreateRenderPass( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( 
&renderPass ) ) ); + + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( result, renderPass, VULKAN_HPP_NAMESPACE_STRING"::Device::createRenderPassUnique", deleter ); + } +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroyRenderPass( RenderPass renderPass, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroyRenderPass( m_device, static_cast( renderPass ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyRenderPass( RenderPass renderPass, Optional allocator, Dispatch const &d ) const + { + d.vkDestroyRenderPass( m_device, static_cast( renderPass ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( RenderPass renderPass, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroyRenderPass( m_device, static_cast( renderPass ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( RenderPass renderPass, Optional allocator, Dispatch const &d ) const + { + d.vkDestroyRenderPass( m_device, static_cast( renderPass ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::getRenderAreaGranularity( RenderPass renderPass, Extent2D* pGranularity, Dispatch const &d) const + { + d.vkGetRenderAreaGranularity( m_device, static_cast( renderPass ), reinterpret_cast( pGranularity ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE Extent2D Device::getRenderAreaGranularity( RenderPass renderPass, Dispatch const &d ) const + { + Extent2D granularity; + d.vkGetRenderAreaGranularity( m_device, static_cast( renderPass ), reinterpret_cast( &granularity ) ); + return granularity; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::createCommandPool( const CommandPoolCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, CommandPool* pCommandPool, Dispatch const &d) const + { + return static_cast( d.vkCreateCommandPool( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pCommandPool ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::createCommandPool( const CommandPoolCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + { + CommandPool commandPool; + Result result = static_cast( d.vkCreateCommandPool( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &commandPool ) ) ); + return createResultValue( result, commandPool, VULKAN_HPP_NAMESPACE_STRING"::Device::createCommandPool" ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type Device::createCommandPoolUnique( const CommandPoolCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + { + CommandPool commandPool; + Result result = static_cast( d.vkCreateCommandPool( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &commandPool ) ) ); + + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( result, commandPool, 
VULKAN_HPP_NAMESPACE_STRING"::Device::createCommandPoolUnique", deleter ); + } +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroyCommandPool( CommandPool commandPool, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroyCommandPool( m_device, static_cast( commandPool ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyCommandPool( CommandPool commandPool, Optional allocator, Dispatch const &d ) const + { + d.vkDestroyCommandPool( m_device, static_cast( commandPool ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( CommandPool commandPool, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroyCommandPool( m_device, static_cast( commandPool ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( CommandPool commandPool, Optional allocator, Dispatch const &d ) const + { + d.vkDestroyCommandPool( m_device, static_cast( commandPool ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE Result Device::resetCommandPool( CommandPool commandPool, CommandPoolResetFlags flags, Dispatch const &d) const + { + return static_cast( d.vkResetCommandPool( m_device, static_cast( commandPool ), static_cast( flags ) ) ); + } +#else + template + VULKAN_HPP_INLINE ResultValueType::type Device::resetCommandPool( CommandPool commandPool, CommandPoolResetFlags flags, Dispatch const &d ) const + { + Result result = static_cast( d.vkResetCommandPool( m_device, static_cast( commandPool ), static_cast( flags ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::resetCommandPool" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::allocateCommandBuffers( const CommandBufferAllocateInfo* pAllocateInfo, CommandBuffer* pCommandBuffers, Dispatch const &d) const + { + return static_cast( d.vkAllocateCommandBuffers( m_device, reinterpret_cast( pAllocateInfo ), reinterpret_cast( pCommandBuffers ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType>::type Device::allocateCommandBuffers( const CommandBufferAllocateInfo & allocateInfo, Dispatch const &d ) const + { + std::vector commandBuffers( allocateInfo.commandBufferCount ); + Result result = static_cast( d.vkAllocateCommandBuffers( m_device, reinterpret_cast( &allocateInfo ), reinterpret_cast( commandBuffers.data() ) ) ); + return createResultValue( result, commandBuffers, VULKAN_HPP_NAMESPACE_STRING"::Device::allocateCommandBuffers" ); + } + template + VULKAN_HPP_INLINE typename ResultValueType>::type Device::allocateCommandBuffers( const CommandBufferAllocateInfo & allocateInfo, Allocator const& vectorAllocator, Dispatch const &d ) const + { + std::vector commandBuffers( allocateInfo.commandBufferCount, vectorAllocator ); + Result result = static_cast( d.vkAllocateCommandBuffers( m_device, reinterpret_cast( &allocateInfo ), reinterpret_cast( commandBuffers.data() ) ) ); + return createResultValue( result, commandBuffers, VULKAN_HPP_NAMESPACE_STRING"::Device::allocateCommandBuffers" ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + 
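+  // Editorial note: allocateCommandBuffersUnique below reserves a vector of UniqueHandle<CommandBuffer,Dispatch>,
+  // lets the driver write the raw VkCommandBuffer handles into the tail of that reserved storage, and then
+  // wraps each handle with a PoolFree deleter so vkFreeCommandBuffers runs automatically when the handles
+  // are dropped. Illustrative use (assumes a valid vk::Device `device` and vk::CommandPool `pool`; this
+  // sketch is an editorial addition, not part of the generated header):
+  //   vk::CommandBufferAllocateInfo info( pool, vk::CommandBufferLevel::ePrimary, 2 );
+  //   auto commandBuffers = device.allocateCommandBuffersUnique( info );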
template + VULKAN_HPP_INLINE typename ResultValueType,Allocator>>::type Device::allocateCommandBuffersUnique( const CommandBufferAllocateInfo & allocateInfo, Dispatch const &d ) const + { + static_assert( sizeof( CommandBuffer ) <= sizeof( UniqueCommandBuffer ), "CommandBuffer is greater than UniqueCommandBuffer!" ); + std::vector commandBuffers; + commandBuffers.reserve( allocateInfo.commandBufferCount ); + CommandBuffer* buffer = reinterpret_cast( reinterpret_cast( commandBuffers.data() ) + allocateInfo.commandBufferCount * ( sizeof( UniqueCommandBuffer ) - sizeof( CommandBuffer ) ) ); + Result result = static_cast(d.vkAllocateCommandBuffers( m_device, reinterpret_cast( &allocateInfo ), reinterpret_cast( buffer ) ) ); + + PoolFree deleter( *this, allocateInfo.commandPool, d ); + for ( size_t i=0 ; i + VULKAN_HPP_INLINE typename ResultValueType,Allocator>>::type Device::allocateCommandBuffersUnique( const CommandBufferAllocateInfo & allocateInfo, Allocator const& vectorAllocator, Dispatch const &d ) const + { + static_assert( sizeof( CommandBuffer ) <= sizeof( UniqueCommandBuffer ), "CommandBuffer is greater than UniqueCommandBuffer!" ); + std::vector commandBuffers; + commandBuffers.reserve( allocateInfo.commandBufferCount ); + CommandBuffer* buffer = reinterpret_cast( reinterpret_cast( commandBuffers.data() ) + allocateInfo.commandBufferCount * ( sizeof( UniqueCommandBuffer ) - sizeof( CommandBuffer ) ) ); + Result result = static_cast(d.vkAllocateCommandBuffers( m_device, reinterpret_cast( &allocateInfo ), reinterpret_cast( buffer ) ) ); + + PoolFree deleter( *this, allocateInfo.commandPool, d ); + for ( size_t i=0 ; i + VULKAN_HPP_INLINE void Device::freeCommandBuffers( CommandPool commandPool, uint32_t commandBufferCount, const CommandBuffer* pCommandBuffers, Dispatch const &d) const + { + d.vkFreeCommandBuffers( m_device, static_cast( commandPool ), commandBufferCount, reinterpret_cast( pCommandBuffers ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::freeCommandBuffers( CommandPool commandPool, ArrayProxy commandBuffers, Dispatch const &d ) const + { + d.vkFreeCommandBuffers( m_device, static_cast( commandPool ), commandBuffers.size() , reinterpret_cast( commandBuffers.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::free( CommandPool commandPool, uint32_t commandBufferCount, const CommandBuffer* pCommandBuffers, Dispatch const &d) const + { + d.vkFreeCommandBuffers( m_device, static_cast( commandPool ), commandBufferCount, reinterpret_cast( pCommandBuffers ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::free( CommandPool commandPool, ArrayProxy commandBuffers, Dispatch const &d ) const + { + d.vkFreeCommandBuffers( m_device, static_cast( commandPool ), commandBuffers.size() , reinterpret_cast( commandBuffers.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::createSharedSwapchainsKHR( uint32_t swapchainCount, const SwapchainCreateInfoKHR* pCreateInfos, const AllocationCallbacks* pAllocator, SwapchainKHR* pSwapchains, Dispatch const &d) const + { + return static_cast( d.vkCreateSharedSwapchainsKHR( m_device, swapchainCount, reinterpret_cast( pCreateInfos ), reinterpret_cast( pAllocator ), reinterpret_cast( pSwapchains ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType>::type Device::createSharedSwapchainsKHR( ArrayProxy 
createInfos, Optional allocator, Dispatch const &d ) const + { + std::vector swapchains( createInfos.size() ); + Result result = static_cast( d.vkCreateSharedSwapchainsKHR( m_device, createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( swapchains.data() ) ) ); + return createResultValue( result, swapchains, VULKAN_HPP_NAMESPACE_STRING"::Device::createSharedSwapchainsKHR" ); + } + template + VULKAN_HPP_INLINE typename ResultValueType>::type Device::createSharedSwapchainsKHR( ArrayProxy createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const + { + std::vector swapchains( createInfos.size(), vectorAllocator ); + Result result = static_cast( d.vkCreateSharedSwapchainsKHR( m_device, createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( swapchains.data() ) ) ); + return createResultValue( result, swapchains, VULKAN_HPP_NAMESPACE_STRING"::Device::createSharedSwapchainsKHR" ); + } + template + VULKAN_HPP_INLINE ResultValueType::type Device::createSharedSwapchainKHR( const SwapchainCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const + { + SwapchainKHR swapchain; + Result result = static_cast( d.vkCreateSharedSwapchainsKHR( m_device, 1 , reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &swapchain ) ) ); + return createResultValue( result, swapchain, VULKAN_HPP_NAMESPACE_STRING"::Device::createSharedSwapchainKHR" ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType,Allocator>>::type Device::createSharedSwapchainsKHRUnique( ArrayProxy createInfos, Optional allocator, Dispatch const &d ) const + { + static_assert( sizeof( SwapchainKHR ) <= sizeof( UniqueSwapchainKHR ), "SwapchainKHR is greater than UniqueSwapchainKHR!" ); + std::vector swapchainKHRs; + swapchainKHRs.reserve( createInfos.size() ); + SwapchainKHR* buffer = reinterpret_cast( reinterpret_cast( swapchainKHRs.data() ) + createInfos.size() * ( sizeof( UniqueSwapchainKHR ) - sizeof( SwapchainKHR ) ) ); + Result result = static_cast(d.vkCreateSharedSwapchainsKHR( m_device, createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( buffer ) ) ); + + ObjectDestroy deleter( *this, allocator, d ); + for ( size_t i=0 ; i + VULKAN_HPP_INLINE typename ResultValueType,Allocator>>::type Device::createSharedSwapchainsKHRUnique( ArrayProxy createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const + { + static_assert( sizeof( SwapchainKHR ) <= sizeof( UniqueSwapchainKHR ), "SwapchainKHR is greater than UniqueSwapchainKHR!" 
); + std::vector swapchainKHRs; + swapchainKHRs.reserve( createInfos.size() ); + SwapchainKHR* buffer = reinterpret_cast( reinterpret_cast( swapchainKHRs.data() ) + createInfos.size() * ( sizeof( UniqueSwapchainKHR ) - sizeof( SwapchainKHR ) ) ); + Result result = static_cast(d.vkCreateSharedSwapchainsKHR( m_device, createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( buffer ) ) ); + + ObjectDestroy deleter( *this, allocator, d ); + for ( size_t i=0 ; i + VULKAN_HPP_INLINE typename ResultValueType>::type Device::createSharedSwapchainKHRUnique( const SwapchainCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const + { + SwapchainKHR swapchain; + Result result = static_cast( d.vkCreateSharedSwapchainsKHR( m_device, 1 , reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &swapchain ) ) ); + + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( result, swapchain, VULKAN_HPP_NAMESPACE_STRING"::Device::createSharedSwapchainKHRUnique", deleter ); + } +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::createSwapchainKHR( const SwapchainCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SwapchainKHR* pSwapchain, Dispatch const &d) const + { + return static_cast( d.vkCreateSwapchainKHR( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSwapchain ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::createSwapchainKHR( const SwapchainCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const + { + SwapchainKHR swapchain; + Result result = static_cast( d.vkCreateSwapchainKHR( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &swapchain ) ) ); + return createResultValue( result, swapchain, VULKAN_HPP_NAMESPACE_STRING"::Device::createSwapchainKHR" ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type Device::createSwapchainKHRUnique( const SwapchainCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const + { + SwapchainKHR swapchain; + Result result = static_cast( d.vkCreateSwapchainKHR( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &swapchain ) ) ); + + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( result, swapchain, VULKAN_HPP_NAMESPACE_STRING"::Device::createSwapchainKHRUnique", deleter ); + } +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroySwapchainKHR( SwapchainKHR swapchain, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroySwapchainKHR( m_device, static_cast( swapchain ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroySwapchainKHR( SwapchainKHR swapchain, Optional allocator, Dispatch const &d ) const + { + d.vkDestroySwapchainKHR( m_device, static_cast( swapchain ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( SwapchainKHR swapchain, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + 
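+    // Editorial note: this unnamed destroy() overload is functionally identical to destroySwapchainKHR()
+    // above; the generic destroy()/free() overload set is what the ObjectDestroy/PoolFree deleters call,
+    // so UniqueHandle cleanup works uniformly across all handle types.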
d.vkDestroySwapchainKHR( m_device, static_cast( swapchain ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( SwapchainKHR swapchain, Optional allocator, Dispatch const &d ) const + { + d.vkDestroySwapchainKHR( m_device, static_cast( swapchain ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::getSwapchainImagesKHR( SwapchainKHR swapchain, uint32_t* pSwapchainImageCount, Image* pSwapchainImages, Dispatch const &d) const + { + return static_cast( d.vkGetSwapchainImagesKHR( m_device, static_cast( swapchain ), pSwapchainImageCount, reinterpret_cast( pSwapchainImages ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType>::type Device::getSwapchainImagesKHR( SwapchainKHR swapchain, Dispatch const &d ) const + { + std::vector swapchainImages; + uint32_t swapchainImageCount; + Result result; + do + { + result = static_cast( d.vkGetSwapchainImagesKHR( m_device, static_cast( swapchain ), &swapchainImageCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && swapchainImageCount ) + { + swapchainImages.resize( swapchainImageCount ); + result = static_cast( d.vkGetSwapchainImagesKHR( m_device, static_cast( swapchain ), &swapchainImageCount, reinterpret_cast( swapchainImages.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + VULKAN_HPP_ASSERT( swapchainImageCount <= swapchainImages.size() ); + swapchainImages.resize( swapchainImageCount ); + return createResultValue( result, swapchainImages, VULKAN_HPP_NAMESPACE_STRING"::Device::getSwapchainImagesKHR" ); + } + template + VULKAN_HPP_INLINE typename ResultValueType>::type Device::getSwapchainImagesKHR( SwapchainKHR swapchain, Allocator const& vectorAllocator, Dispatch const &d ) const + { + std::vector swapchainImages( vectorAllocator ); + uint32_t swapchainImageCount; + Result result; + do + { + result = static_cast( d.vkGetSwapchainImagesKHR( m_device, static_cast( swapchain ), &swapchainImageCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && swapchainImageCount ) + { + swapchainImages.resize( swapchainImageCount ); + result = static_cast( d.vkGetSwapchainImagesKHR( m_device, static_cast( swapchain ), &swapchainImageCount, reinterpret_cast( swapchainImages.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + VULKAN_HPP_ASSERT( swapchainImageCount <= swapchainImages.size() ); + swapchainImages.resize( swapchainImageCount ); + return createResultValue( result, swapchainImages, VULKAN_HPP_NAMESPACE_STRING"::Device::getSwapchainImagesKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::acquireNextImageKHR( SwapchainKHR swapchain, uint64_t timeout, Semaphore semaphore, Fence fence, uint32_t* pImageIndex, Dispatch const &d) const + { + return static_cast( d.vkAcquireNextImageKHR( m_device, static_cast( swapchain ), timeout, static_cast( semaphore ), static_cast( fence ), pImageIndex ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValue Device::acquireNextImageKHR( SwapchainKHR swapchain, uint64_t timeout, Semaphore semaphore, Fence fence, Dispatch const &d ) const + { + uint32_t imageIndex; + Result result = static_cast( d.vkAcquireNextImageKHR( m_device, static_cast( swapchain ), timeout, static_cast( semaphore ), static_cast( fence ), &imageIndex ) ); + return createResultValue( result, 
imageIndex, VULKAN_HPP_NAMESPACE_STRING"::Device::acquireNextImageKHR", { Result::eSuccess, Result::eTimeout, Result::eNotReady, Result::eSuboptimalKHR } ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::debugMarkerSetObjectNameEXT( const DebugMarkerObjectNameInfoEXT* pNameInfo, Dispatch const &d) const + { + return static_cast( d.vkDebugMarkerSetObjectNameEXT( m_device, reinterpret_cast( pNameInfo ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::debugMarkerSetObjectNameEXT( const DebugMarkerObjectNameInfoEXT & nameInfo, Dispatch const &d ) const + { + Result result = static_cast( d.vkDebugMarkerSetObjectNameEXT( m_device, reinterpret_cast( &nameInfo ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::debugMarkerSetObjectNameEXT" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::debugMarkerSetObjectTagEXT( const DebugMarkerObjectTagInfoEXT* pTagInfo, Dispatch const &d) const + { + return static_cast( d.vkDebugMarkerSetObjectTagEXT( m_device, reinterpret_cast( pTagInfo ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::debugMarkerSetObjectTagEXT( const DebugMarkerObjectTagInfoEXT & tagInfo, Dispatch const &d ) const + { + Result result = static_cast( d.vkDebugMarkerSetObjectTagEXT( m_device, reinterpret_cast( &tagInfo ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::debugMarkerSetObjectTagEXT" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VK_USE_PLATFORM_WIN32_NV + template + VULKAN_HPP_INLINE Result Device::getMemoryWin32HandleNV( DeviceMemory memory, ExternalMemoryHandleTypeFlagsNV handleType, HANDLE* pHandle, Dispatch const &d) const + { + return static_cast( d.vkGetMemoryWin32HandleNV( m_device, static_cast( memory ), static_cast( handleType ), pHandle ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::getMemoryWin32HandleNV( DeviceMemory memory, ExternalMemoryHandleTypeFlagsNV handleType, Dispatch const &d ) const + { + HANDLE handle; + Result result = static_cast( d.vkGetMemoryWin32HandleNV( m_device, static_cast( memory ), static_cast( handleType ), &handle ) ); + return createResultValue( result, handle, VULKAN_HPP_NAMESPACE_STRING"::Device::getMemoryWin32HandleNV" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_NV*/ + + template + VULKAN_HPP_INLINE Result Device::createIndirectCommandsLayoutNVX( const IndirectCommandsLayoutCreateInfoNVX* pCreateInfo, const AllocationCallbacks* pAllocator, IndirectCommandsLayoutNVX* pIndirectCommandsLayout, Dispatch const &d) const + { + return static_cast( d.vkCreateIndirectCommandsLayoutNVX( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pIndirectCommandsLayout ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::createIndirectCommandsLayoutNVX( const IndirectCommandsLayoutCreateInfoNVX & createInfo, Optional allocator, Dispatch const &d ) const + { + IndirectCommandsLayoutNVX indirectCommandsLayout; + Result result = static_cast( d.vkCreateIndirectCommandsLayoutNVX( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &indirectCommandsLayout ) ) ); + return createResultValue( result, 
indirectCommandsLayout, VULKAN_HPP_NAMESPACE_STRING"::Device::createIndirectCommandsLayoutNVX" ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type Device::createIndirectCommandsLayoutNVXUnique( const IndirectCommandsLayoutCreateInfoNVX & createInfo, Optional allocator, Dispatch const &d ) const + { + IndirectCommandsLayoutNVX indirectCommandsLayout; + Result result = static_cast( d.vkCreateIndirectCommandsLayoutNVX( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &indirectCommandsLayout ) ) ); + + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( result, indirectCommandsLayout, VULKAN_HPP_NAMESPACE_STRING"::Device::createIndirectCommandsLayoutNVXUnique", deleter ); + } +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroyIndirectCommandsLayoutNVX( IndirectCommandsLayoutNVX indirectCommandsLayout, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroyIndirectCommandsLayoutNVX( m_device, static_cast( indirectCommandsLayout ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyIndirectCommandsLayoutNVX( IndirectCommandsLayoutNVX indirectCommandsLayout, Optional allocator, Dispatch const &d ) const + { + d.vkDestroyIndirectCommandsLayoutNVX( m_device, static_cast( indirectCommandsLayout ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( IndirectCommandsLayoutNVX indirectCommandsLayout, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroyIndirectCommandsLayoutNVX( m_device, static_cast( indirectCommandsLayout ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( IndirectCommandsLayoutNVX indirectCommandsLayout, Optional allocator, Dispatch const &d ) const + { + d.vkDestroyIndirectCommandsLayoutNVX( m_device, static_cast( indirectCommandsLayout ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::createObjectTableNVX( const ObjectTableCreateInfoNVX* pCreateInfo, const AllocationCallbacks* pAllocator, ObjectTableNVX* pObjectTable, Dispatch const &d) const + { + return static_cast( d.vkCreateObjectTableNVX( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pObjectTable ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::createObjectTableNVX( const ObjectTableCreateInfoNVX & createInfo, Optional allocator, Dispatch const &d ) const + { + ObjectTableNVX objectTable; + Result result = static_cast( d.vkCreateObjectTableNVX( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &objectTable ) ) ); + return createResultValue( result, objectTable, VULKAN_HPP_NAMESPACE_STRING"::Device::createObjectTableNVX" ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type Device::createObjectTableNVXUnique( const ObjectTableCreateInfoNVX & createInfo, Optional allocator, Dispatch const &d ) const + { + ObjectTableNVX objectTable; + Result result = static_cast( 
d.vkCreateObjectTableNVX( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &objectTable ) ) ); + + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( result, objectTable, VULKAN_HPP_NAMESPACE_STRING"::Device::createObjectTableNVXUnique", deleter ); + } +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroyObjectTableNVX( ObjectTableNVX objectTable, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroyObjectTableNVX( m_device, static_cast( objectTable ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyObjectTableNVX( ObjectTableNVX objectTable, Optional allocator, Dispatch const &d ) const + { + d.vkDestroyObjectTableNVX( m_device, static_cast( objectTable ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( ObjectTableNVX objectTable, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroyObjectTableNVX( m_device, static_cast( objectTable ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( ObjectTableNVX objectTable, Optional allocator, Dispatch const &d ) const + { + d.vkDestroyObjectTableNVX( m_device, static_cast( objectTable ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::registerObjectsNVX( ObjectTableNVX objectTable, uint32_t objectCount, const ObjectTableEntryNVX* const* ppObjectTableEntries, const uint32_t* pObjectIndices, Dispatch const &d) const + { + return static_cast( d.vkRegisterObjectsNVX( m_device, static_cast( objectTable ), objectCount, reinterpret_cast( ppObjectTableEntries ), pObjectIndices ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::registerObjectsNVX( ObjectTableNVX objectTable, ArrayProxy pObjectTableEntries, ArrayProxy objectIndices, Dispatch const &d ) const + { +#ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( pObjectTableEntries.size() == objectIndices.size() ); +#else + if ( pObjectTableEntries.size() != objectIndices.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Device::registerObjectsNVX: pObjectTableEntries.size() != objectIndices.size()" ); + } +#endif // VULKAN_HPP_NO_EXCEPTIONS + Result result = static_cast( d.vkRegisterObjectsNVX( m_device, static_cast( objectTable ), pObjectTableEntries.size() , reinterpret_cast( pObjectTableEntries.data() ), objectIndices.data() ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::registerObjectsNVX" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::unregisterObjectsNVX( ObjectTableNVX objectTable, uint32_t objectCount, const ObjectEntryTypeNVX* pObjectEntryTypes, const uint32_t* pObjectIndices, Dispatch const &d) const + { + return static_cast( d.vkUnregisterObjectsNVX( m_device, static_cast( objectTable ), objectCount, reinterpret_cast( pObjectEntryTypes ), pObjectIndices ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::unregisterObjectsNVX( ObjectTableNVX objectTable, ArrayProxy objectEntryTypes, ArrayProxy 
objectIndices, Dispatch const &d ) const + { +#ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( objectEntryTypes.size() == objectIndices.size() ); +#else + if ( objectEntryTypes.size() != objectIndices.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Device::unregisterObjectsNVX: objectEntryTypes.size() != objectIndices.size()" ); + } +#endif // VULKAN_HPP_NO_EXCEPTIONS + Result result = static_cast( d.vkUnregisterObjectsNVX( m_device, static_cast( objectTable ), objectEntryTypes.size() , reinterpret_cast( objectEntryTypes.data() ), objectIndices.data() ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::unregisterObjectsNVX" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::trimCommandPool( CommandPool commandPool, CommandPoolTrimFlags flags, Dispatch const &d) const + { + d.vkTrimCommandPool( m_device, static_cast( commandPool ), static_cast( flags ) ); + } +#else + template + VULKAN_HPP_INLINE void Device::trimCommandPool( CommandPool commandPool, CommandPoolTrimFlags flags, Dispatch const &d ) const + { + d.vkTrimCommandPool( m_device, static_cast( commandPool ), static_cast( flags ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::trimCommandPoolKHR( CommandPool commandPool, CommandPoolTrimFlags flags, Dispatch const &d) const + { + d.vkTrimCommandPoolKHR( m_device, static_cast( commandPool ), static_cast( flags ) ); + } +#else + template + VULKAN_HPP_INLINE void Device::trimCommandPoolKHR( CommandPool commandPool, CommandPoolTrimFlags flags, Dispatch const &d ) const + { + d.vkTrimCommandPoolKHR( m_device, static_cast( commandPool ), static_cast( flags ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VK_USE_PLATFORM_WIN32_KHR + template + VULKAN_HPP_INLINE Result Device::getMemoryWin32HandleKHR( const MemoryGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const &d) const + { + return static_cast( d.vkGetMemoryWin32HandleKHR( m_device, reinterpret_cast( pGetWin32HandleInfo ), pHandle ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::getMemoryWin32HandleKHR( const MemoryGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const &d ) const + { + HANDLE handle; + Result result = static_cast( d.vkGetMemoryWin32HandleKHR( m_device, reinterpret_cast( &getWin32HandleInfo ), &handle ) ); + return createResultValue( result, handle, VULKAN_HPP_NAMESPACE_STRING"::Device::getMemoryWin32HandleKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#ifdef VK_USE_PLATFORM_WIN32_KHR + template + VULKAN_HPP_INLINE Result Device::getMemoryWin32HandlePropertiesKHR( ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, MemoryWin32HandlePropertiesKHR* pMemoryWin32HandleProperties, Dispatch const &d) const + { + return static_cast( d.vkGetMemoryWin32HandlePropertiesKHR( m_device, static_cast( handleType ), handle, reinterpret_cast( pMemoryWin32HandleProperties ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::getMemoryWin32HandlePropertiesKHR( ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, Dispatch const &d ) const + { + MemoryWin32HandlePropertiesKHR memoryWin32HandleProperties; + Result result = static_cast( d.vkGetMemoryWin32HandlePropertiesKHR( m_device, 
static_cast( handleType ), handle, reinterpret_cast( &memoryWin32HandleProperties ) ) ); + return createResultValue( result, memoryWin32HandleProperties, VULKAN_HPP_NAMESPACE_STRING"::Device::getMemoryWin32HandlePropertiesKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + template + VULKAN_HPP_INLINE Result Device::getMemoryFdKHR( const MemoryGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const &d) const + { + return static_cast( d.vkGetMemoryFdKHR( m_device, reinterpret_cast( pGetFdInfo ), pFd ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::getMemoryFdKHR( const MemoryGetFdInfoKHR & getFdInfo, Dispatch const &d ) const + { + int fd; + Result result = static_cast( d.vkGetMemoryFdKHR( m_device, reinterpret_cast( &getFdInfo ), &fd ) ); + return createResultValue( result, fd, VULKAN_HPP_NAMESPACE_STRING"::Device::getMemoryFdKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::getMemoryFdPropertiesKHR( ExternalMemoryHandleTypeFlagBits handleType, int fd, MemoryFdPropertiesKHR* pMemoryFdProperties, Dispatch const &d) const + { + return static_cast( d.vkGetMemoryFdPropertiesKHR( m_device, static_cast( handleType ), fd, reinterpret_cast( pMemoryFdProperties ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::getMemoryFdPropertiesKHR( ExternalMemoryHandleTypeFlagBits handleType, int fd, Dispatch const &d ) const + { + MemoryFdPropertiesKHR memoryFdProperties; + Result result = static_cast( d.vkGetMemoryFdPropertiesKHR( m_device, static_cast( handleType ), fd, reinterpret_cast( &memoryFdProperties ) ) ); + return createResultValue( result, memoryFdProperties, VULKAN_HPP_NAMESPACE_STRING"::Device::getMemoryFdPropertiesKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VK_USE_PLATFORM_WIN32_KHR + template + VULKAN_HPP_INLINE Result Device::getSemaphoreWin32HandleKHR( const SemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const &d) const + { + return static_cast( d.vkGetSemaphoreWin32HandleKHR( m_device, reinterpret_cast( pGetWin32HandleInfo ), pHandle ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::getSemaphoreWin32HandleKHR( const SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const &d ) const + { + HANDLE handle; + Result result = static_cast( d.vkGetSemaphoreWin32HandleKHR( m_device, reinterpret_cast( &getWin32HandleInfo ), &handle ) ); + return createResultValue( result, handle, VULKAN_HPP_NAMESPACE_STRING"::Device::getSemaphoreWin32HandleKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#ifdef VK_USE_PLATFORM_WIN32_KHR + template + VULKAN_HPP_INLINE Result Device::importSemaphoreWin32HandleKHR( const ImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo, Dispatch const &d) const + { + return static_cast( d.vkImportSemaphoreWin32HandleKHR( m_device, reinterpret_cast( pImportSemaphoreWin32HandleInfo ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::importSemaphoreWin32HandleKHR( const ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo, Dispatch const &d ) const + { + Result result = static_cast( d.vkImportSemaphoreWin32HandleKHR( m_device, reinterpret_cast( &importSemaphoreWin32HandleInfo ) ) ); + return 
createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::importSemaphoreWin32HandleKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + template + VULKAN_HPP_INLINE Result Device::getSemaphoreFdKHR( const SemaphoreGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const &d) const + { + return static_cast( d.vkGetSemaphoreFdKHR( m_device, reinterpret_cast( pGetFdInfo ), pFd ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::getSemaphoreFdKHR( const SemaphoreGetFdInfoKHR & getFdInfo, Dispatch const &d ) const + { + int fd; + Result result = static_cast( d.vkGetSemaphoreFdKHR( m_device, reinterpret_cast( &getFdInfo ), &fd ) ); + return createResultValue( result, fd, VULKAN_HPP_NAMESPACE_STRING"::Device::getSemaphoreFdKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::importSemaphoreFdKHR( const ImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo, Dispatch const &d) const + { + return static_cast( d.vkImportSemaphoreFdKHR( m_device, reinterpret_cast( pImportSemaphoreFdInfo ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::importSemaphoreFdKHR( const ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo, Dispatch const &d ) const + { + Result result = static_cast( d.vkImportSemaphoreFdKHR( m_device, reinterpret_cast( &importSemaphoreFdInfo ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::importSemaphoreFdKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VK_USE_PLATFORM_WIN32_KHR + template + VULKAN_HPP_INLINE Result Device::getFenceWin32HandleKHR( const FenceGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const &d) const + { + return static_cast( d.vkGetFenceWin32HandleKHR( m_device, reinterpret_cast( pGetWin32HandleInfo ), pHandle ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::getFenceWin32HandleKHR( const FenceGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const &d ) const + { + HANDLE handle; + Result result = static_cast( d.vkGetFenceWin32HandleKHR( m_device, reinterpret_cast( &getWin32HandleInfo ), &handle ) ); + return createResultValue( result, handle, VULKAN_HPP_NAMESPACE_STRING"::Device::getFenceWin32HandleKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#ifdef VK_USE_PLATFORM_WIN32_KHR + template + VULKAN_HPP_INLINE Result Device::importFenceWin32HandleKHR( const ImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo, Dispatch const &d) const + { + return static_cast( d.vkImportFenceWin32HandleKHR( m_device, reinterpret_cast( pImportFenceWin32HandleInfo ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::importFenceWin32HandleKHR( const ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo, Dispatch const &d ) const + { + Result result = static_cast( d.vkImportFenceWin32HandleKHR( m_device, reinterpret_cast( &importFenceWin32HandleInfo ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::importFenceWin32HandleKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + template + VULKAN_HPP_INLINE Result Device::getFenceFdKHR( const FenceGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const &d) const + { + return static_cast( d.vkGetFenceFdKHR( 
m_device, reinterpret_cast( pGetFdInfo ), pFd ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::getFenceFdKHR( const FenceGetFdInfoKHR & getFdInfo, Dispatch const &d ) const + { + int fd; + Result result = static_cast( d.vkGetFenceFdKHR( m_device, reinterpret_cast( &getFdInfo ), &fd ) ); + return createResultValue( result, fd, VULKAN_HPP_NAMESPACE_STRING"::Device::getFenceFdKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::importFenceFdKHR( const ImportFenceFdInfoKHR* pImportFenceFdInfo, Dispatch const &d) const + { + return static_cast( d.vkImportFenceFdKHR( m_device, reinterpret_cast( pImportFenceFdInfo ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::importFenceFdKHR( const ImportFenceFdInfoKHR & importFenceFdInfo, Dispatch const &d ) const + { + Result result = static_cast( d.vkImportFenceFdKHR( m_device, reinterpret_cast( &importFenceFdInfo ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::importFenceFdKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::displayPowerControlEXT( DisplayKHR display, const DisplayPowerInfoEXT* pDisplayPowerInfo, Dispatch const &d) const + { + return static_cast( d.vkDisplayPowerControlEXT( m_device, static_cast( display ), reinterpret_cast( pDisplayPowerInfo ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::displayPowerControlEXT( DisplayKHR display, const DisplayPowerInfoEXT & displayPowerInfo, Dispatch const &d ) const + { + Result result = static_cast( d.vkDisplayPowerControlEXT( m_device, static_cast( display ), reinterpret_cast( &displayPowerInfo ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::displayPowerControlEXT" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::registerEventEXT( const DeviceEventInfoEXT* pDeviceEventInfo, const AllocationCallbacks* pAllocator, Fence* pFence, Dispatch const &d) const + { + return static_cast( d.vkRegisterDeviceEventEXT( m_device, reinterpret_cast( pDeviceEventInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pFence ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::registerEventEXT( const DeviceEventInfoEXT & deviceEventInfo, Optional allocator, Dispatch const &d ) const + { + Fence fence; + Result result = static_cast( d.vkRegisterDeviceEventEXT( m_device, reinterpret_cast( &deviceEventInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &fence ) ) ); + return createResultValue( result, fence, VULKAN_HPP_NAMESPACE_STRING"::Device::registerEventEXT" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::registerDisplayEventEXT( DisplayKHR display, const DisplayEventInfoEXT* pDisplayEventInfo, const AllocationCallbacks* pAllocator, Fence* pFence, Dispatch const &d) const + { + return static_cast( d.vkRegisterDisplayEventEXT( m_device, static_cast( display ), reinterpret_cast( pDisplayEventInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pFence ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::registerDisplayEventEXT( DisplayKHR display, const DisplayEventInfoEXT & displayEventInfo, Optional allocator, 
Dispatch const &d ) const + { + Fence fence; + Result result = static_cast( d.vkRegisterDisplayEventEXT( m_device, static_cast( display ), reinterpret_cast( &displayEventInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &fence ) ) ); + return createResultValue( result, fence, VULKAN_HPP_NAMESPACE_STRING"::Device::registerDisplayEventEXT" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::getSwapchainCounterEXT( SwapchainKHR swapchain, SurfaceCounterFlagBitsEXT counter, uint64_t* pCounterValue, Dispatch const &d) const + { + return static_cast( d.vkGetSwapchainCounterEXT( m_device, static_cast( swapchain ), static_cast( counter ), pCounterValue ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::getSwapchainCounterEXT( SwapchainKHR swapchain, SurfaceCounterFlagBitsEXT counter, Dispatch const &d ) const + { + uint64_t counterValue; + Result result = static_cast( d.vkGetSwapchainCounterEXT( m_device, static_cast( swapchain ), static_cast( counter ), &counterValue ) ); + return createResultValue( result, counterValue, VULKAN_HPP_NAMESPACE_STRING"::Device::getSwapchainCounterEXT" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::getGroupPeerMemoryFeatures( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, PeerMemoryFeatureFlags* pPeerMemoryFeatures, Dispatch const &d) const + { + d.vkGetDeviceGroupPeerMemoryFeatures( m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast( pPeerMemoryFeatures ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE PeerMemoryFeatureFlags Device::getGroupPeerMemoryFeatures( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, Dispatch const &d ) const + { + PeerMemoryFeatureFlags peerMemoryFeatures; + d.vkGetDeviceGroupPeerMemoryFeatures( m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast( &peerMemoryFeatures ) ); + return peerMemoryFeatures; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, PeerMemoryFeatureFlags* pPeerMemoryFeatures, Dispatch const &d) const + { + d.vkGetDeviceGroupPeerMemoryFeaturesKHR( m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast( pPeerMemoryFeatures ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE PeerMemoryFeatureFlags Device::getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, Dispatch const &d ) const + { + PeerMemoryFeatureFlags peerMemoryFeatures; + d.vkGetDeviceGroupPeerMemoryFeaturesKHR( m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast( &peerMemoryFeatures ) ); + return peerMemoryFeatures; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::bindBufferMemory2( uint32_t bindInfoCount, const BindBufferMemoryInfo* pBindInfos, Dispatch const &d) const + { + return static_cast( d.vkBindBufferMemory2( m_device, bindInfoCount, reinterpret_cast( pBindInfos ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::bindBufferMemory2( ArrayProxy bindInfos, Dispatch const &d ) const + { + Result result = static_cast( d.vkBindBufferMemory2( m_device, bindInfos.size() , 
reinterpret_cast<const VkBindBufferMemoryInfo*>( bindInfos.data() ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::bindBufferMemory2" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::bindBufferMemory2KHR( uint32_t bindInfoCount, const BindBufferMemoryInfo* pBindInfos, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkBindBufferMemory2KHR( m_device, bindInfoCount, reinterpret_cast<const VkBindBufferMemoryInfo*>( pBindInfos ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<void>::type Device::bindBufferMemory2KHR( ArrayProxy<const BindBufferMemoryInfo> bindInfos, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkBindBufferMemory2KHR( m_device, bindInfos.size() , reinterpret_cast<const VkBindBufferMemoryInfo*>( bindInfos.data() ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::bindBufferMemory2KHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::bindImageMemory2( uint32_t bindInfoCount, const BindImageMemoryInfo* pBindInfos, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkBindImageMemory2( m_device, bindInfoCount, reinterpret_cast<const VkBindImageMemoryInfo*>( pBindInfos ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<void>::type Device::bindImageMemory2( ArrayProxy<const BindImageMemoryInfo> bindInfos, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkBindImageMemory2( m_device, bindInfos.size() , reinterpret_cast<const VkBindImageMemoryInfo*>( bindInfos.data() ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::bindImageMemory2" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::bindImageMemory2KHR( uint32_t bindInfoCount, const BindImageMemoryInfo* pBindInfos, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkBindImageMemory2KHR( m_device, bindInfoCount, reinterpret_cast<const VkBindImageMemoryInfo*>( pBindInfos ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<void>::type Device::bindImageMemory2KHR( ArrayProxy<const BindImageMemoryInfo> bindInfos, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkBindImageMemory2KHR( m_device, bindInfos.size() , reinterpret_cast<const VkBindImageMemoryInfo*>( bindInfos.data() ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::bindImageMemory2KHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::getGroupPresentCapabilitiesKHR( DeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkGetDeviceGroupPresentCapabilitiesKHR( m_device, reinterpret_cast<VkDeviceGroupPresentCapabilitiesKHR*>( pDeviceGroupPresentCapabilities ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<DeviceGroupPresentCapabilitiesKHR>::type Device::getGroupPresentCapabilitiesKHR(Dispatch const &d ) const
+  {
+    DeviceGroupPresentCapabilitiesKHR deviceGroupPresentCapabilities;
+    Result result = static_cast<Result>( d.vkGetDeviceGroupPresentCapabilitiesKHR( m_device, reinterpret_cast<VkDeviceGroupPresentCapabilitiesKHR*>( &deviceGroupPresentCapabilities ) ) );
+    return createResultValue( result, deviceGroupPresentCapabilities, VULKAN_HPP_NAMESPACE_STRING"::Device::getGroupPresentCapabilitiesKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::getGroupSurfacePresentModesKHR( SurfaceKHR surface, DeviceGroupPresentModeFlagsKHR* pModes, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkGetDeviceGroupSurfacePresentModesKHR( m_device, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR*>( pModes ) ) );
+  }
+#ifndef
VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::getGroupSurfacePresentModesKHR( SurfaceKHR surface, Dispatch const &d ) const + { + DeviceGroupPresentModeFlagsKHR modes; + Result result = static_cast( d.vkGetDeviceGroupSurfacePresentModesKHR( m_device, static_cast( surface ), reinterpret_cast( &modes ) ) ); + return createResultValue( result, modes, VULKAN_HPP_NAMESPACE_STRING"::Device::getGroupSurfacePresentModesKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::acquireNextImage2KHR( const AcquireNextImageInfoKHR* pAcquireInfo, uint32_t* pImageIndex, Dispatch const &d) const + { + return static_cast( d.vkAcquireNextImage2KHR( m_device, reinterpret_cast( pAcquireInfo ), pImageIndex ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValue Device::acquireNextImage2KHR( const AcquireNextImageInfoKHR & acquireInfo, Dispatch const &d ) const + { + uint32_t imageIndex; + Result result = static_cast( d.vkAcquireNextImage2KHR( m_device, reinterpret_cast( &acquireInfo ), &imageIndex ) ); + return createResultValue( result, imageIndex, VULKAN_HPP_NAMESPACE_STRING"::Device::acquireNextImage2KHR", { Result::eSuccess, Result::eTimeout, Result::eNotReady, Result::eSuboptimalKHR } ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::createDescriptorUpdateTemplate( const DescriptorUpdateTemplateCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, DescriptorUpdateTemplate* pDescriptorUpdateTemplate, Dispatch const &d) const + { + return static_cast( d.vkCreateDescriptorUpdateTemplate( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pDescriptorUpdateTemplate ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::createDescriptorUpdateTemplate( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + { + DescriptorUpdateTemplate descriptorUpdateTemplate; + Result result = static_cast( d.vkCreateDescriptorUpdateTemplate( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &descriptorUpdateTemplate ) ) ); + return createResultValue( result, descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE_STRING"::Device::createDescriptorUpdateTemplate" ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type Device::createDescriptorUpdateTemplateUnique( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + { + DescriptorUpdateTemplate descriptorUpdateTemplate; + Result result = static_cast( d.vkCreateDescriptorUpdateTemplate( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &descriptorUpdateTemplate ) ) ); + + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( result, descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE_STRING"::Device::createDescriptorUpdateTemplateUnique", deleter ); + } +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::createDescriptorUpdateTemplateKHR( const DescriptorUpdateTemplateCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, DescriptorUpdateTemplate* pDescriptorUpdateTemplate, Dispatch const &d) const + { + return static_cast( 
d.vkCreateDescriptorUpdateTemplateKHR( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pDescriptorUpdateTemplate ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::createDescriptorUpdateTemplateKHR( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + { + DescriptorUpdateTemplate descriptorUpdateTemplate; + Result result = static_cast( d.vkCreateDescriptorUpdateTemplateKHR( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &descriptorUpdateTemplate ) ) ); + return createResultValue( result, descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE_STRING"::Device::createDescriptorUpdateTemplateKHR" ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type Device::createDescriptorUpdateTemplateKHRUnique( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + { + DescriptorUpdateTemplate descriptorUpdateTemplate; + Result result = static_cast( d.vkCreateDescriptorUpdateTemplateKHR( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &descriptorUpdateTemplate ) ) ); + + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( result, descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE_STRING"::Device::createDescriptorUpdateTemplateKHRUnique", deleter ); + } +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplate( DescriptorUpdateTemplate descriptorUpdateTemplate, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroyDescriptorUpdateTemplate( m_device, static_cast( descriptorUpdateTemplate ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplate( DescriptorUpdateTemplate descriptorUpdateTemplate, Optional allocator, Dispatch const &d ) const + { + d.vkDestroyDescriptorUpdateTemplate( m_device, static_cast( descriptorUpdateTemplate ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( DescriptorUpdateTemplate descriptorUpdateTemplate, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroyDescriptorUpdateTemplate( m_device, static_cast( descriptorUpdateTemplate ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( DescriptorUpdateTemplate descriptorUpdateTemplate, Optional allocator, Dispatch const &d ) const + { + d.vkDestroyDescriptorUpdateTemplate( m_device, static_cast( descriptorUpdateTemplate ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplateKHR( DescriptorUpdateTemplate descriptorUpdateTemplate, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroyDescriptorUpdateTemplateKHR( m_device, static_cast( descriptorUpdateTemplate ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplateKHR( DescriptorUpdateTemplate descriptorUpdateTemplate, 
Optional allocator, Dispatch const &d ) const + { + d.vkDestroyDescriptorUpdateTemplateKHR( m_device, static_cast( descriptorUpdateTemplate ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplate( DescriptorSet descriptorSet, DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const &d) const + { + d.vkUpdateDescriptorSetWithTemplate( m_device, static_cast( descriptorSet ), static_cast( descriptorUpdateTemplate ), pData ); + } +#else + template + VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplate( DescriptorSet descriptorSet, DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const &d ) const + { + d.vkUpdateDescriptorSetWithTemplate( m_device, static_cast( descriptorSet ), static_cast( descriptorUpdateTemplate ), pData ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplateKHR( DescriptorSet descriptorSet, DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const &d) const + { + d.vkUpdateDescriptorSetWithTemplateKHR( m_device, static_cast( descriptorSet ), static_cast( descriptorUpdateTemplate ), pData ); + } +#else + template + VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplateKHR( DescriptorSet descriptorSet, DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const &d ) const + { + d.vkUpdateDescriptorSetWithTemplateKHR( m_device, static_cast( descriptorSet ), static_cast( descriptorUpdateTemplate ), pData ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::setHdrMetadataEXT( uint32_t swapchainCount, const SwapchainKHR* pSwapchains, const HdrMetadataEXT* pMetadata, Dispatch const &d) const + { + d.vkSetHdrMetadataEXT( m_device, swapchainCount, reinterpret_cast( pSwapchains ), reinterpret_cast( pMetadata ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::setHdrMetadataEXT( ArrayProxy swapchains, ArrayProxy metadata, Dispatch const &d ) const + { +#ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( swapchains.size() == metadata.size() ); +#else + if ( swapchains.size() != metadata.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Device::setHdrMetadataEXT: swapchains.size() != metadata.size()" ); + } +#endif // VULKAN_HPP_NO_EXCEPTIONS + d.vkSetHdrMetadataEXT( m_device, swapchains.size() , reinterpret_cast( swapchains.data() ), reinterpret_cast( metadata.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE Result Device::getSwapchainStatusKHR( SwapchainKHR swapchain, Dispatch const &d) const + { + return static_cast( d.vkGetSwapchainStatusKHR( m_device, static_cast( swapchain ) ) ); + } +#else + template + VULKAN_HPP_INLINE Result Device::getSwapchainStatusKHR( SwapchainKHR swapchain, Dispatch const &d ) const + { + Result result = static_cast( d.vkGetSwapchainStatusKHR( m_device, static_cast( swapchain ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getSwapchainStatusKHR", { Result::eSuccess, Result::eSuboptimalKHR } ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::getRefreshCycleDurationGOOGLE( SwapchainKHR 
swapchain, RefreshCycleDurationGOOGLE* pDisplayTimingProperties, Dispatch const &d) const + { + return static_cast( d.vkGetRefreshCycleDurationGOOGLE( m_device, static_cast( swapchain ), reinterpret_cast( pDisplayTimingProperties ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::getRefreshCycleDurationGOOGLE( SwapchainKHR swapchain, Dispatch const &d ) const + { + RefreshCycleDurationGOOGLE displayTimingProperties; + Result result = static_cast( d.vkGetRefreshCycleDurationGOOGLE( m_device, static_cast( swapchain ), reinterpret_cast( &displayTimingProperties ) ) ); + return createResultValue( result, displayTimingProperties, VULKAN_HPP_NAMESPACE_STRING"::Device::getRefreshCycleDurationGOOGLE" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::getPastPresentationTimingGOOGLE( SwapchainKHR swapchain, uint32_t* pPresentationTimingCount, PastPresentationTimingGOOGLE* pPresentationTimings, Dispatch const &d) const + { + return static_cast( d.vkGetPastPresentationTimingGOOGLE( m_device, static_cast( swapchain ), pPresentationTimingCount, reinterpret_cast( pPresentationTimings ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType>::type Device::getPastPresentationTimingGOOGLE( SwapchainKHR swapchain, Dispatch const &d ) const + { + std::vector presentationTimings; + uint32_t presentationTimingCount; + Result result; + do + { + result = static_cast( d.vkGetPastPresentationTimingGOOGLE( m_device, static_cast( swapchain ), &presentationTimingCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && presentationTimingCount ) + { + presentationTimings.resize( presentationTimingCount ); + result = static_cast( d.vkGetPastPresentationTimingGOOGLE( m_device, static_cast( swapchain ), &presentationTimingCount, reinterpret_cast( presentationTimings.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + VULKAN_HPP_ASSERT( presentationTimingCount <= presentationTimings.size() ); + presentationTimings.resize( presentationTimingCount ); + return createResultValue( result, presentationTimings, VULKAN_HPP_NAMESPACE_STRING"::Device::getPastPresentationTimingGOOGLE" ); + } + template + VULKAN_HPP_INLINE typename ResultValueType>::type Device::getPastPresentationTimingGOOGLE( SwapchainKHR swapchain, Allocator const& vectorAllocator, Dispatch const &d ) const + { + std::vector presentationTimings( vectorAllocator ); + uint32_t presentationTimingCount; + Result result; + do + { + result = static_cast( d.vkGetPastPresentationTimingGOOGLE( m_device, static_cast( swapchain ), &presentationTimingCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && presentationTimingCount ) + { + presentationTimings.resize( presentationTimingCount ); + result = static_cast( d.vkGetPastPresentationTimingGOOGLE( m_device, static_cast( swapchain ), &presentationTimingCount, reinterpret_cast( presentationTimings.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + VULKAN_HPP_ASSERT( presentationTimingCount <= presentationTimings.size() ); + presentationTimings.resize( presentationTimingCount ); + return createResultValue( result, presentationTimings, VULKAN_HPP_NAMESPACE_STRING"::Device::getPastPresentationTimingGOOGLE" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::getBufferMemoryRequirements2( const BufferMemoryRequirementsInfo2* pInfo, MemoryRequirements2* pMemoryRequirements, Dispatch const 
&d) const + { + d.vkGetBufferMemoryRequirements2( m_device, reinterpret_cast( pInfo ), reinterpret_cast( pMemoryRequirements ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE MemoryRequirements2 Device::getBufferMemoryRequirements2( const BufferMemoryRequirementsInfo2 & info, Dispatch const &d ) const + { + MemoryRequirements2 memoryRequirements; + d.vkGetBufferMemoryRequirements2( m_device, reinterpret_cast( &info ), reinterpret_cast( &memoryRequirements ) ); + return memoryRequirements; + } + template + VULKAN_HPP_INLINE StructureChain Device::getBufferMemoryRequirements2( const BufferMemoryRequirementsInfo2 & info, Dispatch const &d ) const + { + StructureChain structureChain; + MemoryRequirements2& memoryRequirements = structureChain.template get(); + d.vkGetBufferMemoryRequirements2( m_device, reinterpret_cast( &info ), reinterpret_cast( &memoryRequirements ) ); + return structureChain; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::getBufferMemoryRequirements2KHR( const BufferMemoryRequirementsInfo2* pInfo, MemoryRequirements2* pMemoryRequirements, Dispatch const &d) const + { + d.vkGetBufferMemoryRequirements2KHR( m_device, reinterpret_cast( pInfo ), reinterpret_cast( pMemoryRequirements ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE MemoryRequirements2 Device::getBufferMemoryRequirements2KHR( const BufferMemoryRequirementsInfo2 & info, Dispatch const &d ) const + { + MemoryRequirements2 memoryRequirements; + d.vkGetBufferMemoryRequirements2KHR( m_device, reinterpret_cast( &info ), reinterpret_cast( &memoryRequirements ) ); + return memoryRequirements; + } + template + VULKAN_HPP_INLINE StructureChain Device::getBufferMemoryRequirements2KHR( const BufferMemoryRequirementsInfo2 & info, Dispatch const &d ) const + { + StructureChain structureChain; + MemoryRequirements2& memoryRequirements = structureChain.template get(); + d.vkGetBufferMemoryRequirements2KHR( m_device, reinterpret_cast( &info ), reinterpret_cast( &memoryRequirements ) ); + return structureChain; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::getImageMemoryRequirements2( const ImageMemoryRequirementsInfo2* pInfo, MemoryRequirements2* pMemoryRequirements, Dispatch const &d) const + { + d.vkGetImageMemoryRequirements2( m_device, reinterpret_cast( pInfo ), reinterpret_cast( pMemoryRequirements ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE MemoryRequirements2 Device::getImageMemoryRequirements2( const ImageMemoryRequirementsInfo2 & info, Dispatch const &d ) const + { + MemoryRequirements2 memoryRequirements; + d.vkGetImageMemoryRequirements2( m_device, reinterpret_cast( &info ), reinterpret_cast( &memoryRequirements ) ); + return memoryRequirements; + } + template + VULKAN_HPP_INLINE StructureChain Device::getImageMemoryRequirements2( const ImageMemoryRequirementsInfo2 & info, Dispatch const &d ) const + { + StructureChain structureChain; + MemoryRequirements2& memoryRequirements = structureChain.template get(); + d.vkGetImageMemoryRequirements2( m_device, reinterpret_cast( &info ), reinterpret_cast( &memoryRequirements ) ); + return structureChain; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::getImageMemoryRequirements2KHR( const ImageMemoryRequirementsInfo2* pInfo, MemoryRequirements2* pMemoryRequirements, Dispatch const &d) const + { + d.vkGetImageMemoryRequirements2KHR( 
m_device, reinterpret_cast( pInfo ), reinterpret_cast( pMemoryRequirements ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE MemoryRequirements2 Device::getImageMemoryRequirements2KHR( const ImageMemoryRequirementsInfo2 & info, Dispatch const &d ) const + { + MemoryRequirements2 memoryRequirements; + d.vkGetImageMemoryRequirements2KHR( m_device, reinterpret_cast( &info ), reinterpret_cast( &memoryRequirements ) ); + return memoryRequirements; + } + template + VULKAN_HPP_INLINE StructureChain Device::getImageMemoryRequirements2KHR( const ImageMemoryRequirementsInfo2 & info, Dispatch const &d ) const + { + StructureChain structureChain; + MemoryRequirements2& memoryRequirements = structureChain.template get(); + d.vkGetImageMemoryRequirements2KHR( m_device, reinterpret_cast( &info ), reinterpret_cast( &memoryRequirements ) ); + return structureChain; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements2( const ImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, SparseImageMemoryRequirements2* pSparseMemoryRequirements, Dispatch const &d) const + { + d.vkGetImageSparseMemoryRequirements2( m_device, reinterpret_cast( pInfo ), pSparseMemoryRequirementCount, reinterpret_cast( pSparseMemoryRequirements ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE std::vector Device::getImageSparseMemoryRequirements2( const ImageSparseMemoryRequirementsInfo2 & info, Dispatch const &d ) const + { + std::vector sparseMemoryRequirements; + uint32_t sparseMemoryRequirementCount; + d.vkGetImageSparseMemoryRequirements2( m_device, reinterpret_cast( &info ), &sparseMemoryRequirementCount, nullptr ); + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + d.vkGetImageSparseMemoryRequirements2( m_device, reinterpret_cast( &info ), &sparseMemoryRequirementCount, reinterpret_cast( sparseMemoryRequirements.data() ) ); + return sparseMemoryRequirements; + } + template + VULKAN_HPP_INLINE std::vector Device::getImageSparseMemoryRequirements2( const ImageSparseMemoryRequirementsInfo2 & info, Allocator const& vectorAllocator, Dispatch const &d ) const + { + std::vector sparseMemoryRequirements( vectorAllocator ); + uint32_t sparseMemoryRequirementCount; + d.vkGetImageSparseMemoryRequirements2( m_device, reinterpret_cast( &info ), &sparseMemoryRequirementCount, nullptr ); + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + d.vkGetImageSparseMemoryRequirements2( m_device, reinterpret_cast( &info ), &sparseMemoryRequirementCount, reinterpret_cast( sparseMemoryRequirements.data() ) ); + return sparseMemoryRequirements; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements2KHR( const ImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, SparseImageMemoryRequirements2* pSparseMemoryRequirements, Dispatch const &d) const + { + d.vkGetImageSparseMemoryRequirements2KHR( m_device, reinterpret_cast( pInfo ), pSparseMemoryRequirementCount, reinterpret_cast( pSparseMemoryRequirements ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE std::vector Device::getImageSparseMemoryRequirements2KHR( const ImageSparseMemoryRequirementsInfo2 & info, Dispatch const &d ) const + { + std::vector sparseMemoryRequirements; + uint32_t sparseMemoryRequirementCount; + d.vkGetImageSparseMemoryRequirements2KHR( m_device, 
reinterpret_cast( &info ), &sparseMemoryRequirementCount, nullptr ); + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + d.vkGetImageSparseMemoryRequirements2KHR( m_device, reinterpret_cast( &info ), &sparseMemoryRequirementCount, reinterpret_cast( sparseMemoryRequirements.data() ) ); + return sparseMemoryRequirements; + } + template + VULKAN_HPP_INLINE std::vector Device::getImageSparseMemoryRequirements2KHR( const ImageSparseMemoryRequirementsInfo2 & info, Allocator const& vectorAllocator, Dispatch const &d ) const + { + std::vector sparseMemoryRequirements( vectorAllocator ); + uint32_t sparseMemoryRequirementCount; + d.vkGetImageSparseMemoryRequirements2KHR( m_device, reinterpret_cast( &info ), &sparseMemoryRequirementCount, nullptr ); + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + d.vkGetImageSparseMemoryRequirements2KHR( m_device, reinterpret_cast( &info ), &sparseMemoryRequirementCount, reinterpret_cast( sparseMemoryRequirements.data() ) ); + return sparseMemoryRequirements; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::createSamplerYcbcrConversion( const SamplerYcbcrConversionCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, SamplerYcbcrConversion* pYcbcrConversion, Dispatch const &d) const + { + return static_cast( d.vkCreateSamplerYcbcrConversion( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pYcbcrConversion ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::createSamplerYcbcrConversion( const SamplerYcbcrConversionCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + { + SamplerYcbcrConversion ycbcrConversion; + Result result = static_cast( d.vkCreateSamplerYcbcrConversion( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &ycbcrConversion ) ) ); + return createResultValue( result, ycbcrConversion, VULKAN_HPP_NAMESPACE_STRING"::Device::createSamplerYcbcrConversion" ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type Device::createSamplerYcbcrConversionUnique( const SamplerYcbcrConversionCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + { + SamplerYcbcrConversion ycbcrConversion; + Result result = static_cast( d.vkCreateSamplerYcbcrConversion( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &ycbcrConversion ) ) ); + + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( result, ycbcrConversion, VULKAN_HPP_NAMESPACE_STRING"::Device::createSamplerYcbcrConversionUnique", deleter ); + } +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::createSamplerYcbcrConversionKHR( const SamplerYcbcrConversionCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, SamplerYcbcrConversion* pYcbcrConversion, Dispatch const &d) const + { + return static_cast( d.vkCreateSamplerYcbcrConversionKHR( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pYcbcrConversion ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::createSamplerYcbcrConversionKHR( const SamplerYcbcrConversionCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + { + SamplerYcbcrConversion 
ycbcrConversion; + Result result = static_cast( d.vkCreateSamplerYcbcrConversionKHR( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &ycbcrConversion ) ) ); + return createResultValue( result, ycbcrConversion, VULKAN_HPP_NAMESPACE_STRING"::Device::createSamplerYcbcrConversionKHR" ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type Device::createSamplerYcbcrConversionKHRUnique( const SamplerYcbcrConversionCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + { + SamplerYcbcrConversion ycbcrConversion; + Result result = static_cast( d.vkCreateSamplerYcbcrConversionKHR( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &ycbcrConversion ) ) ); + + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( result, ycbcrConversion, VULKAN_HPP_NAMESPACE_STRING"::Device::createSamplerYcbcrConversionKHRUnique", deleter ); + } +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversion( SamplerYcbcrConversion ycbcrConversion, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroySamplerYcbcrConversion( m_device, static_cast( ycbcrConversion ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversion( SamplerYcbcrConversion ycbcrConversion, Optional allocator, Dispatch const &d ) const + { + d.vkDestroySamplerYcbcrConversion( m_device, static_cast( ycbcrConversion ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( SamplerYcbcrConversion ycbcrConversion, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroySamplerYcbcrConversion( m_device, static_cast( ycbcrConversion ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( SamplerYcbcrConversion ycbcrConversion, Optional allocator, Dispatch const &d ) const + { + d.vkDestroySamplerYcbcrConversion( m_device, static_cast( ycbcrConversion ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversionKHR( SamplerYcbcrConversion ycbcrConversion, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroySamplerYcbcrConversionKHR( m_device, static_cast( ycbcrConversion ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversionKHR( SamplerYcbcrConversion ycbcrConversion, Optional allocator, Dispatch const &d ) const + { + d.vkDestroySamplerYcbcrConversionKHR( m_device, static_cast( ycbcrConversion ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::getQueue2( const DeviceQueueInfo2* pQueueInfo, Queue* pQueue, Dispatch const &d) const + { + d.vkGetDeviceQueue2( m_device, reinterpret_cast( pQueueInfo ), reinterpret_cast( pQueue ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE Queue Device::getQueue2( const DeviceQueueInfo2 & queueInfo, Dispatch const &d ) const + { + Queue queue; 
+ d.vkGetDeviceQueue2( m_device, reinterpret_cast( &queueInfo ), reinterpret_cast( &queue ) ); + return queue; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::createValidationCacheEXT( const ValidationCacheCreateInfoEXT* pCreateInfo, const AllocationCallbacks* pAllocator, ValidationCacheEXT* pValidationCache, Dispatch const &d) const + { + return static_cast( d.vkCreateValidationCacheEXT( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pValidationCache ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::createValidationCacheEXT( const ValidationCacheCreateInfoEXT & createInfo, Optional allocator, Dispatch const &d ) const + { + ValidationCacheEXT validationCache; + Result result = static_cast( d.vkCreateValidationCacheEXT( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &validationCache ) ) ); + return createResultValue( result, validationCache, VULKAN_HPP_NAMESPACE_STRING"::Device::createValidationCacheEXT" ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type Device::createValidationCacheEXTUnique( const ValidationCacheCreateInfoEXT & createInfo, Optional allocator, Dispatch const &d ) const + { + ValidationCacheEXT validationCache; + Result result = static_cast( d.vkCreateValidationCacheEXT( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &validationCache ) ) ); + + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( result, validationCache, VULKAN_HPP_NAMESPACE_STRING"::Device::createValidationCacheEXTUnique", deleter ); + } +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroyValidationCacheEXT( ValidationCacheEXT validationCache, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroyValidationCacheEXT( m_device, static_cast( validationCache ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyValidationCacheEXT( ValidationCacheEXT validationCache, Optional allocator, Dispatch const &d ) const + { + d.vkDestroyValidationCacheEXT( m_device, static_cast( validationCache ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( ValidationCacheEXT validationCache, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroyValidationCacheEXT( m_device, static_cast( validationCache ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( ValidationCacheEXT validationCache, Optional allocator, Dispatch const &d ) const + { + d.vkDestroyValidationCacheEXT( m_device, static_cast( validationCache ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::getValidationCacheDataEXT( ValidationCacheEXT validationCache, size_t* pDataSize, void* pData, Dispatch const &d) const + { + return static_cast( d.vkGetValidationCacheDataEXT( m_device, static_cast( validationCache ), pDataSize, pData ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename 
ResultValueType>::type Device::getValidationCacheDataEXT( ValidationCacheEXT validationCache, Dispatch const &d ) const + { + std::vector data; + size_t dataSize; + Result result; + do + { + result = static_cast( d.vkGetValidationCacheDataEXT( m_device, static_cast( validationCache ), &dataSize, nullptr ) ); + if ( ( result == Result::eSuccess ) && dataSize ) + { + data.resize( dataSize ); + result = static_cast( d.vkGetValidationCacheDataEXT( m_device, static_cast( validationCache ), &dataSize, reinterpret_cast( data.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + VULKAN_HPP_ASSERT( dataSize <= data.size() ); + data.resize( dataSize ); + return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING"::Device::getValidationCacheDataEXT" ); + } + template + VULKAN_HPP_INLINE typename ResultValueType>::type Device::getValidationCacheDataEXT( ValidationCacheEXT validationCache, Allocator const& vectorAllocator, Dispatch const &d ) const + { + std::vector data( vectorAllocator ); + size_t dataSize; + Result result; + do + { + result = static_cast( d.vkGetValidationCacheDataEXT( m_device, static_cast( validationCache ), &dataSize, nullptr ) ); + if ( ( result == Result::eSuccess ) && dataSize ) + { + data.resize( dataSize ); + result = static_cast( d.vkGetValidationCacheDataEXT( m_device, static_cast( validationCache ), &dataSize, reinterpret_cast( data.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + VULKAN_HPP_ASSERT( dataSize <= data.size() ); + data.resize( dataSize ); + return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING"::Device::getValidationCacheDataEXT" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::mergeValidationCachesEXT( ValidationCacheEXT dstCache, uint32_t srcCacheCount, const ValidationCacheEXT* pSrcCaches, Dispatch const &d) const + { + return static_cast( d.vkMergeValidationCachesEXT( m_device, static_cast( dstCache ), srcCacheCount, reinterpret_cast( pSrcCaches ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::mergeValidationCachesEXT( ValidationCacheEXT dstCache, ArrayProxy srcCaches, Dispatch const &d ) const + { + Result result = static_cast( d.vkMergeValidationCachesEXT( m_device, static_cast( dstCache ), srcCaches.size() , reinterpret_cast( srcCaches.data() ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::mergeValidationCachesEXT" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::getDescriptorSetLayoutSupport( const DescriptorSetLayoutCreateInfo* pCreateInfo, DescriptorSetLayoutSupport* pSupport, Dispatch const &d) const + { + d.vkGetDescriptorSetLayoutSupport( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pSupport ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE DescriptorSetLayoutSupport Device::getDescriptorSetLayoutSupport( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d ) const + { + DescriptorSetLayoutSupport support; + d.vkGetDescriptorSetLayoutSupport( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( &support ) ); + return support; + } + template + VULKAN_HPP_INLINE StructureChain Device::getDescriptorSetLayoutSupport( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d ) const + { + StructureChain structureChain; + DescriptorSetLayoutSupport& support = structureChain.template get(); + d.vkGetDescriptorSetLayoutSupport( 
m_device, reinterpret_cast( &createInfo ), reinterpret_cast( &support ) ); + return structureChain; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::getDescriptorSetLayoutSupportKHR( const DescriptorSetLayoutCreateInfo* pCreateInfo, DescriptorSetLayoutSupport* pSupport, Dispatch const &d) const + { + d.vkGetDescriptorSetLayoutSupportKHR( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pSupport ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE DescriptorSetLayoutSupport Device::getDescriptorSetLayoutSupportKHR( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d ) const + { + DescriptorSetLayoutSupport support; + d.vkGetDescriptorSetLayoutSupportKHR( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( &support ) ); + return support; + } + template + VULKAN_HPP_INLINE StructureChain Device::getDescriptorSetLayoutSupportKHR( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d ) const + { + StructureChain structureChain; + DescriptorSetLayoutSupport& support = structureChain.template get(); + d.vkGetDescriptorSetLayoutSupportKHR( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( &support ) ); + return structureChain; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::getShaderInfoAMD( Pipeline pipeline, ShaderStageFlagBits shaderStage, ShaderInfoTypeAMD infoType, size_t* pInfoSize, void* pInfo, Dispatch const &d) const + { + return static_cast( d.vkGetShaderInfoAMD( m_device, static_cast( pipeline ), static_cast( shaderStage ), static_cast( infoType ), pInfoSize, pInfo ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType>::type Device::getShaderInfoAMD( Pipeline pipeline, ShaderStageFlagBits shaderStage, ShaderInfoTypeAMD infoType, Dispatch const &d ) const + { + std::vector info; + size_t infoSize; + Result result; + do + { + result = static_cast( d.vkGetShaderInfoAMD( m_device, static_cast( pipeline ), static_cast( shaderStage ), static_cast( infoType ), &infoSize, nullptr ) ); + if ( ( result == Result::eSuccess ) && infoSize ) + { + info.resize( infoSize ); + result = static_cast( d.vkGetShaderInfoAMD( m_device, static_cast( pipeline ), static_cast( shaderStage ), static_cast( infoType ), &infoSize, reinterpret_cast( info.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + VULKAN_HPP_ASSERT( infoSize <= info.size() ); + info.resize( infoSize ); + return createResultValue( result, info, VULKAN_HPP_NAMESPACE_STRING"::Device::getShaderInfoAMD" ); + } + template + VULKAN_HPP_INLINE typename ResultValueType>::type Device::getShaderInfoAMD( Pipeline pipeline, ShaderStageFlagBits shaderStage, ShaderInfoTypeAMD infoType, Allocator const& vectorAllocator, Dispatch const &d ) const + { + std::vector info( vectorAllocator ); + size_t infoSize; + Result result; + do + { + result = static_cast( d.vkGetShaderInfoAMD( m_device, static_cast( pipeline ), static_cast( shaderStage ), static_cast( infoType ), &infoSize, nullptr ) ); + if ( ( result == Result::eSuccess ) && infoSize ) + { + info.resize( infoSize ); + result = static_cast( d.vkGetShaderInfoAMD( m_device, static_cast( pipeline ), static_cast( shaderStage ), static_cast( infoType ), &infoSize, reinterpret_cast( info.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + VULKAN_HPP_ASSERT( infoSize <= info.size() ); + info.resize( infoSize ); + return createResultValue( result, info, 
VULKAN_HPP_NAMESPACE_STRING"::Device::getShaderInfoAMD" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::getCalibratedTimestampsEXT( uint32_t timestampCount, const CalibratedTimestampInfoEXT* pTimestampInfos, uint64_t* pTimestamps, uint64_t* pMaxDeviation, Dispatch const &d) const + { + return static_cast( d.vkGetCalibratedTimestampsEXT( m_device, timestampCount, reinterpret_cast( pTimestampInfos ), pTimestamps, pMaxDeviation ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::getCalibratedTimestampsEXT( ArrayProxy timestampInfos, ArrayProxy timestamps, Dispatch const &d ) const + { +#ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( timestampInfos.size() == timestamps.size() ); +#else + if ( timestampInfos.size() != timestamps.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsEXT: timestampInfos.size() != timestamps.size()" ); + } +#endif // VULKAN_HPP_NO_EXCEPTIONS + uint64_t maxDeviation; + Result result = static_cast( d.vkGetCalibratedTimestampsEXT( m_device, timestampInfos.size() , reinterpret_cast( timestampInfos.data() ), timestamps.data(), &maxDeviation ) ); + return createResultValue( result, maxDeviation, VULKAN_HPP_NAMESPACE_STRING"::Device::getCalibratedTimestampsEXT" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::setDebugUtilsObjectNameEXT( const DebugUtilsObjectNameInfoEXT* pNameInfo, Dispatch const &d) const + { + return static_cast( d.vkSetDebugUtilsObjectNameEXT( m_device, reinterpret_cast( pNameInfo ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::setDebugUtilsObjectNameEXT( const DebugUtilsObjectNameInfoEXT & nameInfo, Dispatch const &d ) const + { + Result result = static_cast( d.vkSetDebugUtilsObjectNameEXT( m_device, reinterpret_cast( &nameInfo ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::setDebugUtilsObjectNameEXT" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::setDebugUtilsObjectTagEXT( const DebugUtilsObjectTagInfoEXT* pTagInfo, Dispatch const &d) const + { + return static_cast( d.vkSetDebugUtilsObjectTagEXT( m_device, reinterpret_cast( pTagInfo ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::setDebugUtilsObjectTagEXT( const DebugUtilsObjectTagInfoEXT & tagInfo, Dispatch const &d ) const + { + Result result = static_cast( d.vkSetDebugUtilsObjectTagEXT( m_device, reinterpret_cast( &tagInfo ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::setDebugUtilsObjectTagEXT" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::getMemoryHostPointerPropertiesEXT( ExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, MemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties, Dispatch const &d) const + { + return static_cast( d.vkGetMemoryHostPointerPropertiesEXT( m_device, static_cast( handleType ), pHostPointer, reinterpret_cast( pMemoryHostPointerProperties ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::getMemoryHostPointerPropertiesEXT( ExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, Dispatch const &d ) const + { + MemoryHostPointerPropertiesEXT 
memoryHostPointerProperties;
+    Result result = static_cast<Result>( d.vkGetMemoryHostPointerPropertiesEXT( m_device, static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), pHostPointer, reinterpret_cast<VkMemoryHostPointerPropertiesEXT*>( &memoryHostPointerProperties ) ) );
+    return createResultValue( result, memoryHostPointerProperties, VULKAN_HPP_NAMESPACE_STRING"::Device::getMemoryHostPointerPropertiesEXT" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::createRenderPass2KHR( const RenderPassCreateInfo2KHR* pCreateInfo, const AllocationCallbacks* pAllocator, RenderPass* pRenderPass, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkCreateRenderPass2KHR( m_device, reinterpret_cast<const VkRenderPassCreateInfo2KHR*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkRenderPass*>( pRenderPass ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<RenderPass>::type Device::createRenderPass2KHR( const RenderPassCreateInfo2KHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    RenderPass renderPass;
+    Result result = static_cast<Result>( d.vkCreateRenderPass2KHR( m_device, reinterpret_cast<const VkRenderPassCreateInfo2KHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkRenderPass*>( &renderPass ) ) );
+    return createResultValue( result, renderPass, VULKAN_HPP_NAMESPACE_STRING"::Device::createRenderPass2KHR" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<RenderPass,Dispatch>>::type Device::createRenderPass2KHRUnique( const RenderPassCreateInfo2KHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    RenderPass renderPass;
+    Result result = static_cast<Result>( d.vkCreateRenderPass2KHR( m_device, reinterpret_cast<const VkRenderPassCreateInfo2KHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkRenderPass*>( &renderPass ) ) );
+
+    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<RenderPass,Dispatch>( result, renderPass, VULKAN_HPP_NAMESPACE_STRING"::Device::createRenderPass2KHRUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer* buffer, AndroidHardwareBufferPropertiesANDROID* pProperties, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkGetAndroidHardwareBufferPropertiesANDROID( m_device, buffer, reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID*>( pProperties ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<AndroidHardwareBufferPropertiesANDROID>::type Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const &d ) const
+  {
+    AndroidHardwareBufferPropertiesANDROID properties;
+    Result result = static_cast<Result>( d.vkGetAndroidHardwareBufferPropertiesANDROID( m_device, &buffer, reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID*>( &properties ) ) );
+    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::Device::getAndroidHardwareBufferPropertiesANDROID" );
+  }
+  template<typename X, typename Y, typename ...Z, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<StructureChain<X, Y, Z...>>::type Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const &d ) const
+  {
+    StructureChain<X, Y, Z...> structureChain;
+    AndroidHardwareBufferPropertiesANDROID& properties = structureChain.template get<AndroidHardwareBufferPropertiesANDROID>();
+    Result result = static_cast<Result>( d.vkGetAndroidHardwareBufferPropertiesANDROID( m_device, &buffer, reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID*>( &properties ) ) );
+    return createResultValue( result, structureChain, VULKAN_HPP_NAMESPACE_STRING"::Device::getAndroidHardwareBufferPropertiesANDROID" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::getMemoryAndroidHardwareBufferANDROID( const MemoryGetAndroidHardwareBufferInfoANDROID* pInfo, struct AHardwareBuffer** pBuffer, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkGetMemoryAndroidHardwareBufferANDROID( m_device, reinterpret_cast<const VkMemoryGetAndroidHardwareBufferInfoANDROID*>( pInfo ), pBuffer ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<struct AHardwareBuffer*>::type Device::getMemoryAndroidHardwareBufferANDROID( const MemoryGetAndroidHardwareBufferInfoANDROID & info, Dispatch const &d ) const
+  {
+    struct AHardwareBuffer* buffer;
+    Result result = static_cast<Result>( d.vkGetMemoryAndroidHardwareBufferANDROID( m_device, reinterpret_cast<const VkMemoryGetAndroidHardwareBufferInfoANDROID*>( &info ), &buffer ) );
+    return createResultValue( result, buffer, VULKAN_HPP_NAMESPACE_STRING"::Device::getMemoryAndroidHardwareBufferANDROID" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::compileDeferredNV( Pipeline pipeline, uint32_t shader, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkCompileDeferredNV( m_device, static_cast<VkPipeline>( pipeline ), shader ) );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<void>::type Device::compileDeferredNV( Pipeline pipeline, uint32_t shader, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkCompileDeferredNV( m_device, static_cast<VkPipeline>( pipeline ), shader ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::compileDeferredNV" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::createAccelerationStructureNV( const AccelerationStructureCreateInfoNV* pCreateInfo, const AllocationCallbacks* pAllocator, AccelerationStructureNV* pAccelerationStructure, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkCreateAccelerationStructureNV( m_device, reinterpret_cast<const VkAccelerationStructureCreateInfoNV*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkAccelerationStructureNV*>( pAccelerationStructure ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<AccelerationStructureNV>::type Device::createAccelerationStructureNV( const AccelerationStructureCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    AccelerationStructureNV accelerationStructure;
+    Result result = static_cast<Result>( d.vkCreateAccelerationStructureNV( m_device, reinterpret_cast<const VkAccelerationStructureCreateInfoNV*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkAccelerationStructureNV*>( &accelerationStructure ) ) );
+    return createResultValue( result, accelerationStructure, VULKAN_HPP_NAMESPACE_STRING"::Device::createAccelerationStructureNV" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<AccelerationStructureNV,Dispatch>>::type Device::createAccelerationStructureNVUnique( const AccelerationStructureCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    AccelerationStructureNV accelerationStructure;
+    Result result = static_cast<Result>( d.vkCreateAccelerationStructureNV( m_device, reinterpret_cast<const VkAccelerationStructureCreateInfoNV*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkAccelerationStructureNV*>( &accelerationStructure ) ) );
+
+    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<AccelerationStructureNV,Dispatch>( result, accelerationStructure, VULKAN_HPP_NAMESPACE_STRING"::Device::createAccelerationStructureNVUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void
Device::destroyAccelerationStructureNV( AccelerationStructureNV accelerationStructure, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroyAccelerationStructureNV( m_device, static_cast( accelerationStructure ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyAccelerationStructureNV( AccelerationStructureNV accelerationStructure, Optional allocator, Dispatch const &d ) const + { + d.vkDestroyAccelerationStructureNV( m_device, static_cast( accelerationStructure ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( AccelerationStructureNV accelerationStructure, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroyAccelerationStructureNV( m_device, static_cast( accelerationStructure ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( AccelerationStructureNV accelerationStructure, Optional allocator, Dispatch const &d ) const + { + d.vkDestroyAccelerationStructureNV( m_device, static_cast( accelerationStructure ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::getAccelerationStructureMemoryRequirementsNV( const AccelerationStructureMemoryRequirementsInfoNV* pInfo, MemoryRequirements2KHR* pMemoryRequirements, Dispatch const &d) const + { + d.vkGetAccelerationStructureMemoryRequirementsNV( m_device, reinterpret_cast( pInfo ), reinterpret_cast( pMemoryRequirements ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE MemoryRequirements2KHR Device::getAccelerationStructureMemoryRequirementsNV( const AccelerationStructureMemoryRequirementsInfoNV & info, Dispatch const &d ) const + { + MemoryRequirements2KHR memoryRequirements; + d.vkGetAccelerationStructureMemoryRequirementsNV( m_device, reinterpret_cast( &info ), reinterpret_cast( &memoryRequirements ) ); + return memoryRequirements; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::bindAccelerationStructureMemoryNV( uint32_t bindInfoCount, const BindAccelerationStructureMemoryInfoNV* pBindInfos, Dispatch const &d) const + { + return static_cast( d.vkBindAccelerationStructureMemoryNV( m_device, bindInfoCount, reinterpret_cast( pBindInfos ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::bindAccelerationStructureMemoryNV( ArrayProxy bindInfos, Dispatch const &d ) const + { + Result result = static_cast( d.vkBindAccelerationStructureMemoryNV( m_device, bindInfos.size() , reinterpret_cast( bindInfos.data() ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::bindAccelerationStructureMemoryNV" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::getRayTracingShaderGroupHandlesNV( Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData, Dispatch const &d) const + { + return static_cast( d.vkGetRayTracingShaderGroupHandlesNV( m_device, static_cast( pipeline ), firstGroup, groupCount, dataSize, pData ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::getRayTracingShaderGroupHandlesNV( Pipeline pipeline, uint32_t firstGroup, uint32_t 
groupCount, ArrayProxy data, Dispatch const &d ) const + { + Result result = static_cast( d.vkGetRayTracingShaderGroupHandlesNV( m_device, static_cast( pipeline ), firstGroup, groupCount, data.size() * sizeof( T ) , reinterpret_cast( data.data() ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getRayTracingShaderGroupHandlesNV" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::getAccelerationStructureHandleNV( AccelerationStructureNV accelerationStructure, size_t dataSize, void* pData, Dispatch const &d) const + { + return static_cast( d.vkGetAccelerationStructureHandleNV( m_device, static_cast( accelerationStructure ), dataSize, pData ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::getAccelerationStructureHandleNV( AccelerationStructureNV accelerationStructure, ArrayProxy data, Dispatch const &d ) const + { + Result result = static_cast( d.vkGetAccelerationStructureHandleNV( m_device, static_cast( accelerationStructure ), data.size() * sizeof( T ) , reinterpret_cast( data.data() ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getAccelerationStructureHandleNV" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::createRayTracingPipelinesNV( PipelineCache pipelineCache, uint32_t createInfoCount, const RayTracingPipelineCreateInfoNV* pCreateInfos, const AllocationCallbacks* pAllocator, Pipeline* pPipelines, Dispatch const &d) const + { + return static_cast( d.vkCreateRayTracingPipelinesNV( m_device, static_cast( pipelineCache ), createInfoCount, reinterpret_cast( pCreateInfos ), reinterpret_cast( pAllocator ), reinterpret_cast( pPipelines ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType>::type Device::createRayTracingPipelinesNV( PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Dispatch const &d ) const + { + std::vector pipelines( createInfos.size() ); + Result result = static_cast( d.vkCreateRayTracingPipelinesNV( m_device, static_cast( pipelineCache ), createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( pipelines.data() ) ) ); + return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING"::Device::createRayTracingPipelinesNV" ); + } + template + VULKAN_HPP_INLINE typename ResultValueType>::type Device::createRayTracingPipelinesNV( PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const + { + std::vector pipelines( createInfos.size(), vectorAllocator ); + Result result = static_cast( d.vkCreateRayTracingPipelinesNV( m_device, static_cast( pipelineCache ), createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( pipelines.data() ) ) ); + return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING"::Device::createRayTracingPipelinesNV" ); + } + template + VULKAN_HPP_INLINE ResultValueType::type Device::createRayTracingPipelineNV( PipelineCache pipelineCache, const RayTracingPipelineCreateInfoNV & createInfo, Optional allocator, Dispatch const &d ) const + { + Pipeline pipeline; + Result result = static_cast( d.vkCreateRayTracingPipelinesNV( m_device, static_cast( pipelineCache ), 1 , reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( 
allocator ) ), reinterpret_cast( &pipeline ) ) ); + return createResultValue( result, pipeline, VULKAN_HPP_NAMESPACE_STRING"::Device::createRayTracingPipelineNV" ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType,Allocator>>::type Device::createRayTracingPipelinesNVUnique( PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Dispatch const &d ) const + { + static_assert( sizeof( Pipeline ) <= sizeof( UniquePipeline ), "Pipeline is greater than UniquePipeline!" ); + std::vector pipelines; + pipelines.reserve( createInfos.size() ); + Pipeline* buffer = reinterpret_cast( reinterpret_cast( pipelines.data() ) + createInfos.size() * ( sizeof( UniquePipeline ) - sizeof( Pipeline ) ) ); + Result result = static_cast(d.vkCreateRayTracingPipelinesNV( m_device, static_cast( pipelineCache ), createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( buffer ) ) ); + + ObjectDestroy deleter( *this, allocator, d ); + for ( size_t i=0 ; i + VULKAN_HPP_INLINE typename ResultValueType,Allocator>>::type Device::createRayTracingPipelinesNVUnique( PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const + { + static_assert( sizeof( Pipeline ) <= sizeof( UniquePipeline ), "Pipeline is greater than UniquePipeline!" ); + std::vector pipelines; + pipelines.reserve( createInfos.size() ); + Pipeline* buffer = reinterpret_cast( reinterpret_cast( pipelines.data() ) + createInfos.size() * ( sizeof( UniquePipeline ) - sizeof( Pipeline ) ) ); + Result result = static_cast(d.vkCreateRayTracingPipelinesNV( m_device, static_cast( pipelineCache ), createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( buffer ) ) ); + + ObjectDestroy deleter( *this, allocator, d ); + for ( size_t i=0 ; i + VULKAN_HPP_INLINE typename ResultValueType>::type Device::createRayTracingPipelineNVUnique( PipelineCache pipelineCache, const RayTracingPipelineCreateInfoNV & createInfo, Optional allocator, Dispatch const &d ) const + { + Pipeline pipeline; + Result result = static_cast( d.vkCreateRayTracingPipelinesNV( m_device, static_cast( pipelineCache ), 1 , reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &pipeline ) ) ); + + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( result, pipeline, VULKAN_HPP_NAMESPACE_STRING"::Device::createRayTracingPipelineNVUnique", deleter ); + } +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::getImageDrmFormatModifierPropertiesEXT( Image image, ImageDrmFormatModifierPropertiesEXT* pProperties, Dispatch const &d) const + { + return static_cast( d.vkGetImageDrmFormatModifierPropertiesEXT( m_device, static_cast( image ), reinterpret_cast( pProperties ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::getImageDrmFormatModifierPropertiesEXT( Image image, Dispatch const &d ) const + { + ImageDrmFormatModifierPropertiesEXT properties; + Result result = static_cast( d.vkGetImageDrmFormatModifierPropertiesEXT( m_device, static_cast( image ), reinterpret_cast( &properties ) ) ); + return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::Device::getImageDrmFormatModifierPropertiesEXT" ); + } +#endif 
/*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifndef VULKAN_HPP_NO_SMART_HANDLE + + template class UniqueHandleTraits {public: using deleter = ObjectDestroy; }; + using UniqueDevice = UniqueHandle; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ + + class PhysicalDevice + { + public: + VULKAN_HPP_CONSTEXPR PhysicalDevice() + : m_physicalDevice(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDevice( std::nullptr_t ) + : m_physicalDevice(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT PhysicalDevice( VkPhysicalDevice physicalDevice ) + : m_physicalDevice( physicalDevice ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + PhysicalDevice & operator=(VkPhysicalDevice physicalDevice) + { + m_physicalDevice = physicalDevice; + return *this; + } +#endif + + PhysicalDevice & operator=( std::nullptr_t ) + { + m_physicalDevice = VK_NULL_HANDLE; + return *this; + } + + bool operator==( PhysicalDevice const & rhs ) const + { + return m_physicalDevice == rhs.m_physicalDevice; + } + + bool operator!=(PhysicalDevice const & rhs ) const + { + return m_physicalDevice != rhs.m_physicalDevice; + } + + bool operator<(PhysicalDevice const & rhs ) const + { + return m_physicalDevice < rhs.m_physicalDevice; + } + + template + void getProperties( PhysicalDeviceProperties* pProperties, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + PhysicalDeviceProperties getProperties(Dispatch const &d = Dispatch() ) const; + template + StructureChain getProperties(Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getQueueFamilyProperties( uint32_t* pQueueFamilyPropertyCount, QueueFamilyProperties* pQueueFamilyProperties, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = DispatchLoaderStatic> + std::vector getQueueFamilyProperties(Dispatch const &d = Dispatch() ) const; + template , typename Dispatch = DispatchLoaderStatic> + std::vector getQueueFamilyProperties(Allocator const& vectorAllocator, Dispatch const &d ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getMemoryProperties( PhysicalDeviceMemoryProperties* pMemoryProperties, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + PhysicalDeviceMemoryProperties getMemoryProperties(Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getFeatures( PhysicalDeviceFeatures* pFeatures, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + PhysicalDeviceFeatures getFeatures(Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getFormatProperties( Format format, FormatProperties* pFormatProperties, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + FormatProperties getFormatProperties( Format format, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result getImageFormatProperties( Format format, ImageType type, ImageTiling tiling, ImageUsageFlags usage, ImageCreateFlags flags, ImageFormatProperties* pImageFormatProperties, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type getImageFormatProperties( Format format, ImageType type, ImageTiling tiling, ImageUsageFlags usage, ImageCreateFlags flags, Dispatch const &d = Dispatch() ) const; +#endif 
/*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result createDevice( const DeviceCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Device* pDevice, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type createDevice( const DeviceCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + typename ResultValueType>::type createDeviceUnique( const DeviceCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result enumerateDeviceLayerProperties( uint32_t* pPropertyCount, LayerProperties* pProperties, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = DispatchLoaderStatic> + typename ResultValueType>::type enumerateDeviceLayerProperties(Dispatch const &d = Dispatch() ) const; + template , typename Dispatch = DispatchLoaderStatic> + typename ResultValueType>::type enumerateDeviceLayerProperties(Allocator const& vectorAllocator, Dispatch const &d ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result enumerateDeviceExtensionProperties( const char* pLayerName, uint32_t* pPropertyCount, ExtensionProperties* pProperties, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = DispatchLoaderStatic> + typename ResultValueType>::type enumerateDeviceExtensionProperties( Optional layerName = nullptr, Dispatch const &d = Dispatch() ) const; + template , typename Dispatch = DispatchLoaderStatic> + typename ResultValueType>::type enumerateDeviceExtensionProperties( Optional layerName, Allocator const& vectorAllocator, Dispatch const &d ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getSparseImageFormatProperties( Format format, ImageType type, SampleCountFlagBits samples, ImageUsageFlags usage, ImageTiling tiling, uint32_t* pPropertyCount, SparseImageFormatProperties* pProperties, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = DispatchLoaderStatic> + std::vector getSparseImageFormatProperties( Format format, ImageType type, SampleCountFlagBits samples, ImageUsageFlags usage, ImageTiling tiling, Dispatch const &d = Dispatch() ) const; + template , typename Dispatch = DispatchLoaderStatic> + std::vector getSparseImageFormatProperties( Format format, ImageType type, SampleCountFlagBits samples, ImageUsageFlags usage, ImageTiling tiling, Allocator const& vectorAllocator, Dispatch const &d ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result getDisplayPropertiesKHR( uint32_t* pPropertyCount, DisplayPropertiesKHR* pProperties, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = DispatchLoaderStatic> + typename ResultValueType>::type getDisplayPropertiesKHR(Dispatch const &d = Dispatch() ) const; + template , typename Dispatch = DispatchLoaderStatic> + typename ResultValueType>::type getDisplayPropertiesKHR(Allocator const& vectorAllocator, Dispatch const &d ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result getDisplayPlanePropertiesKHR( uint32_t* pPropertyCount, DisplayPlanePropertiesKHR* pProperties, Dispatch const &d = Dispatch() ) const; +#ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = DispatchLoaderStatic> + typename ResultValueType>::type getDisplayPlanePropertiesKHR(Dispatch const &d = Dispatch() ) const; + template , typename Dispatch = DispatchLoaderStatic> + typename ResultValueType>::type getDisplayPlanePropertiesKHR(Allocator const& vectorAllocator, Dispatch const &d ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, uint32_t* pDisplayCount, DisplayKHR* pDisplays, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = DispatchLoaderStatic> + typename ResultValueType>::type getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, Dispatch const &d = Dispatch() ) const; + template , typename Dispatch = DispatchLoaderStatic> + typename ResultValueType>::type getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, Allocator const& vectorAllocator, Dispatch const &d ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result getDisplayModePropertiesKHR( DisplayKHR display, uint32_t* pPropertyCount, DisplayModePropertiesKHR* pProperties, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = DispatchLoaderStatic> + typename ResultValueType>::type getDisplayModePropertiesKHR( DisplayKHR display, Dispatch const &d = Dispatch() ) const; + template , typename Dispatch = DispatchLoaderStatic> + typename ResultValueType>::type getDisplayModePropertiesKHR( DisplayKHR display, Allocator const& vectorAllocator, Dispatch const &d ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result createDisplayModeKHR( DisplayKHR display, const DisplayModeCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, DisplayModeKHR* pMode, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type createDisplayModeKHR( DisplayKHR display, const DisplayModeCreateInfoKHR & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result getDisplayPlaneCapabilitiesKHR( DisplayModeKHR mode, uint32_t planeIndex, DisplayPlaneCapabilitiesKHR* pCapabilities, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type getDisplayPlaneCapabilitiesKHR( DisplayModeKHR mode, uint32_t planeIndex, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result getSurfaceSupportKHR( uint32_t queueFamilyIndex, SurfaceKHR surface, Bool32* pSupported, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type getSurfaceSupportKHR( uint32_t queueFamilyIndex, SurfaceKHR surface, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result getSurfaceCapabilitiesKHR( SurfaceKHR surface, SurfaceCapabilitiesKHR* pSurfaceCapabilities, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type getSurfaceCapabilitiesKHR( SurfaceKHR surface, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result getSurfaceFormatsKHR( SurfaceKHR surface, uint32_t* pSurfaceFormatCount, SurfaceFormatKHR* pSurfaceFormats, Dispatch const &d = Dispatch() ) const; +#ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = DispatchLoaderStatic> + typename ResultValueType>::type getSurfaceFormatsKHR( SurfaceKHR surface, Dispatch const &d = Dispatch() ) const; + template , typename Dispatch = DispatchLoaderStatic> + typename ResultValueType>::type getSurfaceFormatsKHR( SurfaceKHR surface, Allocator const& vectorAllocator, Dispatch const &d ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result getSurfacePresentModesKHR( SurfaceKHR surface, uint32_t* pPresentModeCount, PresentModeKHR* pPresentModes, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = DispatchLoaderStatic> + typename ResultValueType>::type getSurfacePresentModesKHR( SurfaceKHR surface, Dispatch const &d = Dispatch() ) const; + template , typename Dispatch = DispatchLoaderStatic> + typename ResultValueType>::type getSurfacePresentModesKHR( SurfaceKHR surface, Allocator const& vectorAllocator, Dispatch const &d ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VK_USE_PLATFORM_WAYLAND_KHR + template + Bool32 getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, struct wl_display* display, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + Bool32 getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, struct wl_display & display, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + +#ifdef VK_USE_PLATFORM_WIN32_KHR + template + Bool32 getWin32PresentationSupportKHR( uint32_t queueFamilyIndex, Dispatch const &d = Dispatch() ) const; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#ifdef VK_USE_PLATFORM_XLIB_KHR + template + Bool32 getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display* dpy, VisualID visualID, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + Bool32 getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display & dpy, VisualID visualID, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ + +#ifdef VK_USE_PLATFORM_XCB_KHR + template + Bool32 getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, xcb_connection_t* connection, xcb_visualid_t visual_id, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + Bool32 getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, xcb_connection_t & connection, xcb_visualid_t visual_id, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_XCB_KHR*/ + + template + Result getExternalImageFormatPropertiesNV( Format format, ImageType type, ImageTiling tiling, ImageUsageFlags usage, ImageCreateFlags flags, ExternalMemoryHandleTypeFlagsNV externalHandleType, ExternalImageFormatPropertiesNV* pExternalImageFormatProperties, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type getExternalImageFormatPropertiesNV( Format format, ImageType type, ImageTiling tiling, ImageUsageFlags usage, ImageCreateFlags flags, ExternalMemoryHandleTypeFlagsNV externalHandleType, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getGeneratedCommandsPropertiesNVX( DeviceGeneratedCommandsFeaturesNVX* pFeatures, DeviceGeneratedCommandsLimitsNVX* pLimits, Dispatch const &d = Dispatch() ) const; +#ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE + template + DeviceGeneratedCommandsLimitsNVX getGeneratedCommandsPropertiesNVX( DeviceGeneratedCommandsFeaturesNVX & features, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getFeatures2( PhysicalDeviceFeatures2* pFeatures, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + PhysicalDeviceFeatures2 getFeatures2(Dispatch const &d = Dispatch() ) const; + template + StructureChain getFeatures2(Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getFeatures2KHR( PhysicalDeviceFeatures2* pFeatures, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + PhysicalDeviceFeatures2 getFeatures2KHR(Dispatch const &d = Dispatch() ) const; + template + StructureChain getFeatures2KHR(Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getProperties2( PhysicalDeviceProperties2* pProperties, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + PhysicalDeviceProperties2 getProperties2(Dispatch const &d = Dispatch() ) const; + template + StructureChain getProperties2(Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getProperties2KHR( PhysicalDeviceProperties2* pProperties, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + PhysicalDeviceProperties2 getProperties2KHR(Dispatch const &d = Dispatch() ) const; + template + StructureChain getProperties2KHR(Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getFormatProperties2( Format format, FormatProperties2* pFormatProperties, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + FormatProperties2 getFormatProperties2( Format format, Dispatch const &d = Dispatch() ) const; + template + StructureChain getFormatProperties2( Format format, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getFormatProperties2KHR( Format format, FormatProperties2* pFormatProperties, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + FormatProperties2 getFormatProperties2KHR( Format format, Dispatch const &d = Dispatch() ) const; + template + StructureChain getFormatProperties2KHR( Format format, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2* pImageFormatInfo, ImageFormatProperties2* pImageFormatProperties, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d = Dispatch() ) const; + template + typename ResultValueType>::type getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2* pImageFormatInfo, ImageFormatProperties2* pImageFormatProperties, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type getImageFormatProperties2KHR( const 
PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d = Dispatch() ) const; + template + typename ResultValueType>::type getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getQueueFamilyProperties2( uint32_t* pQueueFamilyPropertyCount, QueueFamilyProperties2* pQueueFamilyProperties, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = DispatchLoaderStatic> + std::vector getQueueFamilyProperties2(Dispatch const &d = Dispatch() ) const; + template , typename Dispatch = DispatchLoaderStatic> + std::vector getQueueFamilyProperties2(Allocator const& vectorAllocator, Dispatch const &d ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getQueueFamilyProperties2KHR( uint32_t* pQueueFamilyPropertyCount, QueueFamilyProperties2* pQueueFamilyProperties, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = DispatchLoaderStatic> + std::vector getQueueFamilyProperties2KHR(Dispatch const &d = Dispatch() ) const; + template , typename Dispatch = DispatchLoaderStatic> + std::vector getQueueFamilyProperties2KHR(Allocator const& vectorAllocator, Dispatch const &d ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getMemoryProperties2( PhysicalDeviceMemoryProperties2* pMemoryProperties, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + PhysicalDeviceMemoryProperties2 getMemoryProperties2(Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getMemoryProperties2KHR( PhysicalDeviceMemoryProperties2* pMemoryProperties, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + PhysicalDeviceMemoryProperties2 getMemoryProperties2KHR(Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getSparseImageFormatProperties2( const PhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, SparseImageFormatProperties2* pProperties, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = DispatchLoaderStatic> + std::vector getSparseImageFormatProperties2( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Dispatch const &d = Dispatch() ) const; + template , typename Dispatch = DispatchLoaderStatic> + std::vector getSparseImageFormatProperties2( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Allocator const& vectorAllocator, Dispatch const &d ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getSparseImageFormatProperties2KHR( const PhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, SparseImageFormatProperties2* pProperties, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = DispatchLoaderStatic> + std::vector getSparseImageFormatProperties2KHR( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Dispatch const &d = Dispatch() ) const; + template , typename Dispatch = DispatchLoaderStatic> + std::vector getSparseImageFormatProperties2KHR( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Allocator const& vectorAllocator, Dispatch const &d ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void 
getExternalBufferProperties( const PhysicalDeviceExternalBufferInfo* pExternalBufferInfo, ExternalBufferProperties* pExternalBufferProperties, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ExternalBufferProperties getExternalBufferProperties( const PhysicalDeviceExternalBufferInfo & externalBufferInfo, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getExternalBufferPropertiesKHR( const PhysicalDeviceExternalBufferInfo* pExternalBufferInfo, ExternalBufferProperties* pExternalBufferProperties, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ExternalBufferProperties getExternalBufferPropertiesKHR( const PhysicalDeviceExternalBufferInfo & externalBufferInfo, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getExternalSemaphoreProperties( const PhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, ExternalSemaphoreProperties* pExternalSemaphoreProperties, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ExternalSemaphoreProperties getExternalSemaphoreProperties( const PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getExternalSemaphorePropertiesKHR( const PhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, ExternalSemaphoreProperties* pExternalSemaphoreProperties, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ExternalSemaphoreProperties getExternalSemaphorePropertiesKHR( const PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getExternalFenceProperties( const PhysicalDeviceExternalFenceInfo* pExternalFenceInfo, ExternalFenceProperties* pExternalFenceProperties, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ExternalFenceProperties getExternalFenceProperties( const PhysicalDeviceExternalFenceInfo & externalFenceInfo, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getExternalFencePropertiesKHR( const PhysicalDeviceExternalFenceInfo* pExternalFenceInfo, ExternalFenceProperties* pExternalFenceProperties, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ExternalFenceProperties getExternalFencePropertiesKHR( const PhysicalDeviceExternalFenceInfo & externalFenceInfo, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + Result releaseDisplayEXT( DisplayKHR display, Dispatch const &d = Dispatch() ) const; +#else + template + ResultValueType::type releaseDisplayEXT( DisplayKHR display, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VK_USE_PLATFORM_XLIB_XRANDR_NV + template + Result acquireXlibDisplayEXT( Display* dpy, DisplayKHR display, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type acquireXlibDisplayEXT( DisplayKHR display, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_XLIB_XRANDR_NV*/ + +#ifdef VK_USE_PLATFORM_XLIB_XRANDR_NV + template + 
Result getRandROutputDisplayEXT( Display* dpy, RROutput rrOutput, DisplayKHR* pDisplay, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type getRandROutputDisplayEXT( Display & dpy, RROutput rrOutput, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_XLIB_XRANDR_NV*/ + + template + Result getSurfaceCapabilities2EXT( SurfaceKHR surface, SurfaceCapabilities2EXT* pSurfaceCapabilities, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type getSurfaceCapabilities2EXT( SurfaceKHR surface, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result getPresentRectanglesKHR( SurfaceKHR surface, uint32_t* pRectCount, Rect2D* pRects, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = DispatchLoaderStatic> + typename ResultValueType>::type getPresentRectanglesKHR( SurfaceKHR surface, Dispatch const &d = Dispatch() ) const; + template , typename Dispatch = DispatchLoaderStatic> + typename ResultValueType>::type getPresentRectanglesKHR( SurfaceKHR surface, Allocator const& vectorAllocator, Dispatch const &d ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getMultisamplePropertiesEXT( SampleCountFlagBits samples, MultisamplePropertiesEXT* pMultisampleProperties, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + MultisamplePropertiesEXT getMultisamplePropertiesEXT( SampleCountFlagBits samples, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, SurfaceCapabilities2KHR* pSurfaceCapabilities, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d = Dispatch() ) const; + template + typename ResultValueType>::type getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result getSurfaceFormats2KHR( const PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pSurfaceFormatCount, SurfaceFormat2KHR* pSurfaceFormats, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = DispatchLoaderStatic> + typename ResultValueType>::type getSurfaceFormats2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d = Dispatch() ) const; + template , typename Dispatch = DispatchLoaderStatic> + typename ResultValueType>::type getSurfaceFormats2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Allocator const& vectorAllocator, Dispatch const &d ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result getDisplayProperties2KHR( uint32_t* pPropertyCount, DisplayProperties2KHR* pProperties, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = DispatchLoaderStatic> + typename ResultValueType>::type getDisplayProperties2KHR(Dispatch const &d = Dispatch() ) const; + template , typename Dispatch = DispatchLoaderStatic> + typename ResultValueType>::type getDisplayProperties2KHR(Allocator const& 
vectorAllocator, Dispatch const &d ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result getDisplayPlaneProperties2KHR( uint32_t* pPropertyCount, DisplayPlaneProperties2KHR* pProperties, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = DispatchLoaderStatic> + typename ResultValueType>::type getDisplayPlaneProperties2KHR(Dispatch const &d = Dispatch() ) const; + template , typename Dispatch = DispatchLoaderStatic> + typename ResultValueType>::type getDisplayPlaneProperties2KHR(Allocator const& vectorAllocator, Dispatch const &d ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result getDisplayModeProperties2KHR( DisplayKHR display, uint32_t* pPropertyCount, DisplayModeProperties2KHR* pProperties, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = DispatchLoaderStatic> + typename ResultValueType>::type getDisplayModeProperties2KHR( DisplayKHR display, Dispatch const &d = Dispatch() ) const; + template , typename Dispatch = DispatchLoaderStatic> + typename ResultValueType>::type getDisplayModeProperties2KHR( DisplayKHR display, Allocator const& vectorAllocator, Dispatch const &d ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result getDisplayPlaneCapabilities2KHR( const DisplayPlaneInfo2KHR* pDisplayPlaneInfo, DisplayPlaneCapabilities2KHR* pCapabilities, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type getDisplayPlaneCapabilities2KHR( const DisplayPlaneInfo2KHR & displayPlaneInfo, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result getCalibrateableTimeDomainsEXT( uint32_t* pTimeDomainCount, TimeDomainEXT* pTimeDomains, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = DispatchLoaderStatic> + typename ResultValueType>::type getCalibrateableTimeDomainsEXT(Dispatch const &d = Dispatch() ) const; + template , typename Dispatch = DispatchLoaderStatic> + typename ResultValueType>::type getCalibrateableTimeDomainsEXT(Allocator const& vectorAllocator, Dispatch const &d ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPhysicalDevice() const + { + return m_physicalDevice; + } + + explicit operator bool() const + { + return m_physicalDevice != VK_NULL_HANDLE; + } + + bool operator!() const + { + return m_physicalDevice == VK_NULL_HANDLE; + } + + private: + VkPhysicalDevice m_physicalDevice; + }; + + static_assert( sizeof( PhysicalDevice ) == sizeof( VkPhysicalDevice ), "handle and wrapper have different size!" 
);
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getProperties( PhysicalDeviceProperties* pProperties, Dispatch const &d) const
+  {
+    d.vkGetPhysicalDeviceProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties*>( pProperties ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE PhysicalDeviceProperties PhysicalDevice::getProperties(Dispatch const &d ) const
+  {
+    PhysicalDeviceProperties properties;
+    d.vkGetPhysicalDeviceProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties*>( &properties ) );
+    return properties;
+  }
+  template <typename X, typename Y, typename ...Z, typename Dispatch>
+  VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getProperties(Dispatch const &d ) const
+  {
+    StructureChain<X, Y, Z...> structureChain;
+    PhysicalDeviceProperties& properties = structureChain.template get<PhysicalDeviceProperties>();
+    d.vkGetPhysicalDeviceProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties*>( &properties ) );
+    return structureChain;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyProperties( uint32_t* pQueueFamilyPropertyCount, QueueFamilyProperties* pQueueFamilyProperties, Dispatch const &d) const
+  {
+    d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, pQueueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties*>( pQueueFamilyProperties ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE std::vector<QueueFamilyProperties,Allocator> PhysicalDevice::getQueueFamilyProperties(Dispatch const &d ) const
+  {
+    std::vector<QueueFamilyProperties,Allocator> queueFamilyProperties;
+    uint32_t queueFamilyPropertyCount;
+    d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
+    queueFamilyProperties.resize( queueFamilyPropertyCount );
+    d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties*>( queueFamilyProperties.data() ) );
+    return queueFamilyProperties;
+  }
+  template <typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE std::vector<QueueFamilyProperties,Allocator> PhysicalDevice::getQueueFamilyProperties(Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<QueueFamilyProperties,Allocator> queueFamilyProperties( vectorAllocator );
+    uint32_t queueFamilyPropertyCount;
+    d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
+    queueFamilyProperties.resize( queueFamilyPropertyCount );
+    d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties*>( queueFamilyProperties.data() ) );
+    return queueFamilyProperties;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getMemoryProperties( PhysicalDeviceMemoryProperties* pMemoryProperties, Dispatch const &d) const
+  {
+    d.vkGetPhysicalDeviceMemoryProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties*>( pMemoryProperties ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE PhysicalDeviceMemoryProperties PhysicalDevice::getMemoryProperties(Dispatch const &d ) const
+  {
+    PhysicalDeviceMemoryProperties memoryProperties;
+    d.vkGetPhysicalDeviceMemoryProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties*>( &memoryProperties ) );
+    return memoryProperties;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getFeatures( PhysicalDeviceFeatures* pFeatures, Dispatch const &d) const
+  {
+    d.vkGetPhysicalDeviceFeatures( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures*>( pFeatures ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE PhysicalDeviceFeatures PhysicalDevice::getFeatures(Dispatch const &d ) const
+  {
+    PhysicalDeviceFeatures features;
+    d.vkGetPhysicalDeviceFeatures(
m_physicalDevice, reinterpret_cast( &features ) ); + return features; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void PhysicalDevice::getFormatProperties( Format format, FormatProperties* pFormatProperties, Dispatch const &d) const + { + d.vkGetPhysicalDeviceFormatProperties( m_physicalDevice, static_cast( format ), reinterpret_cast( pFormatProperties ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE FormatProperties PhysicalDevice::getFormatProperties( Format format, Dispatch const &d ) const + { + FormatProperties formatProperties; + d.vkGetPhysicalDeviceFormatProperties( m_physicalDevice, static_cast( format ), reinterpret_cast( &formatProperties ) ); + return formatProperties; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result PhysicalDevice::getImageFormatProperties( Format format, ImageType type, ImageTiling tiling, ImageUsageFlags usage, ImageCreateFlags flags, ImageFormatProperties* pImageFormatProperties, Dispatch const &d) const + { + return static_cast( d.vkGetPhysicalDeviceImageFormatProperties( m_physicalDevice, static_cast( format ), static_cast( type ), static_cast( tiling ), static_cast( usage ), static_cast( flags ), reinterpret_cast( pImageFormatProperties ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type PhysicalDevice::getImageFormatProperties( Format format, ImageType type, ImageTiling tiling, ImageUsageFlags usage, ImageCreateFlags flags, Dispatch const &d ) const + { + ImageFormatProperties imageFormatProperties; + Result result = static_cast( d.vkGetPhysicalDeviceImageFormatProperties( m_physicalDevice, static_cast( format ), static_cast( type ), static_cast( tiling ), static_cast( usage ), static_cast( flags ), reinterpret_cast( &imageFormatProperties ) ) ); + return createResultValue( result, imageFormatProperties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getImageFormatProperties" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result PhysicalDevice::createDevice( const DeviceCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Device* pDevice, Dispatch const &d) const + { + return static_cast( d.vkCreateDevice( m_physicalDevice, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pDevice ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type PhysicalDevice::createDevice( const DeviceCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + { + Device device; + Result result = static_cast( d.vkCreateDevice( m_physicalDevice, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &device ) ) ); + return createResultValue( result, device, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::createDevice" ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::createDeviceUnique( const DeviceCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + { + Device device; + Result result = static_cast( d.vkCreateDevice( m_physicalDevice, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &device ) ) ); + + ObjectDestroy deleter( allocator, d ); + return createResultValue( result, device, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::createDeviceUnique", deleter ); + } +#endif 
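+  // Editorial note: the lines below are an illustrative usage sketch only, not part of the
+  // generated header. They assume enhanced mode with exceptions enabled, a valid
+  // vk::PhysicalDevice named `physicalDevice`, and that queue family 0 is suitable; adjust
+  // the queue family index and create-info fields for a real application.
+  //
+  //   float queuePriority = 1.0f;
+  //   vk::DeviceQueueCreateInfo queueInfo( vk::DeviceQueueCreateFlags(), 0, 1, &queuePriority );
+  //   vk::DeviceCreateInfo createInfo( vk::DeviceCreateFlags(), 1, &queueInfo );
+  //   vk::Device device = physicalDevice.createDevice( createInfo );            // plain handle, throws on failure
+  //   vk::UniqueDevice owned = physicalDevice.createDeviceUnique( createInfo ); // RAII wrapper, destroyed automatically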
/*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result PhysicalDevice::enumerateDeviceLayerProperties( uint32_t* pPropertyCount, LayerProperties* pProperties, Dispatch const &d) const + { + return static_cast( d.vkEnumerateDeviceLayerProperties( m_physicalDevice, pPropertyCount, reinterpret_cast( pProperties ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::enumerateDeviceLayerProperties(Dispatch const &d ) const + { + std::vector properties; + uint32_t propertyCount; + Result result; + do + { + result = static_cast( d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + properties.resize( propertyCount ); + return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::enumerateDeviceLayerProperties" ); + } + template + VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::enumerateDeviceLayerProperties(Allocator const& vectorAllocator, Dispatch const &d ) const + { + std::vector properties( vectorAllocator ); + uint32_t propertyCount; + Result result; + do + { + result = static_cast( d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + properties.resize( propertyCount ); + return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::enumerateDeviceLayerProperties" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result PhysicalDevice::enumerateDeviceExtensionProperties( const char* pLayerName, uint32_t* pPropertyCount, ExtensionProperties* pProperties, Dispatch const &d) const + { + return static_cast( d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, pLayerName, pPropertyCount, reinterpret_cast( pProperties ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::enumerateDeviceExtensionProperties( Optional layerName, Dispatch const &d ) const + { + std::vector properties; + uint32_t propertyCount; + Result result; + do + { + result = static_cast( d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, layerName ? 
layerName->c_str() : nullptr, &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + properties.resize( propertyCount ); + return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::enumerateDeviceExtensionProperties" ); + } + template + VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::enumerateDeviceExtensionProperties( Optional layerName, Allocator const& vectorAllocator, Dispatch const &d ) const + { + std::vector properties( vectorAllocator ); + uint32_t propertyCount; + Result result; + do + { + result = static_cast( d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + properties.resize( propertyCount ); + return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::enumerateDeviceExtensionProperties" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties( Format format, ImageType type, SampleCountFlagBits samples, ImageUsageFlags usage, ImageTiling tiling, uint32_t* pPropertyCount, SparseImageFormatProperties* pProperties, Dispatch const &d) const + { + d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, static_cast( format ), static_cast( type ), static_cast( samples ), static_cast( usage ), static_cast( tiling ), pPropertyCount, reinterpret_cast( pProperties ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE std::vector PhysicalDevice::getSparseImageFormatProperties( Format format, ImageType type, SampleCountFlagBits samples, ImageUsageFlags usage, ImageTiling tiling, Dispatch const &d ) const + { + std::vector properties; + uint32_t propertyCount; + d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, static_cast( format ), static_cast( type ), static_cast( samples ), static_cast( usage ), static_cast( tiling ), &propertyCount, nullptr ); + properties.resize( propertyCount ); + d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, static_cast( format ), static_cast( type ), static_cast( samples ), static_cast( usage ), static_cast( tiling ), &propertyCount, reinterpret_cast( properties.data() ) ); + return properties; + } + template + VULKAN_HPP_INLINE std::vector PhysicalDevice::getSparseImageFormatProperties( Format format, ImageType type, SampleCountFlagBits samples, ImageUsageFlags usage, ImageTiling tiling, Allocator const& vectorAllocator, Dispatch const &d ) const + { + std::vector properties( vectorAllocator ); + uint32_t propertyCount; + d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, static_cast( format ), static_cast( type ), static_cast( samples ), static_cast( usage ), static_cast( tiling ), &propertyCount, nullptr ); + properties.resize( propertyCount ); + d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, static_cast( format ), static_cast( type ), static_cast( samples ), static_cast( usage ), static_cast( tiling ), 
&propertyCount, reinterpret_cast( properties.data() ) ); + return properties; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPropertiesKHR( uint32_t* pPropertyCount, DisplayPropertiesKHR* pProperties, Dispatch const &d) const + { + return static_cast( d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, pPropertyCount, reinterpret_cast( pProperties ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getDisplayPropertiesKHR(Dispatch const &d ) const + { + std::vector properties; + uint32_t propertyCount; + Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + properties.resize( propertyCount ); + return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayPropertiesKHR" ); + } + template + VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getDisplayPropertiesKHR(Allocator const& vectorAllocator, Dispatch const &d ) const + { + std::vector properties( vectorAllocator ); + uint32_t propertyCount; + Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + properties.resize( propertyCount ); + return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayPropertiesKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlanePropertiesKHR( uint32_t* pPropertyCount, DisplayPlanePropertiesKHR* pProperties, Dispatch const &d) const + { + return static_cast( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, pPropertyCount, reinterpret_cast( pProperties ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getDisplayPlanePropertiesKHR(Dispatch const &d ) const + { + std::vector properties; + uint32_t propertyCount; + Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + properties.resize( propertyCount ); + return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayPlanePropertiesKHR" ); + } + template + VULKAN_HPP_INLINE typename ResultValueType>::type 
PhysicalDevice::getDisplayPlanePropertiesKHR(Allocator const& vectorAllocator, Dispatch const &d ) const + { + std::vector properties( vectorAllocator ); + uint32_t propertyCount; + Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + properties.resize( propertyCount ); + return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayPlanePropertiesKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, uint32_t* pDisplayCount, DisplayKHR* pDisplays, Dispatch const &d) const + { + return static_cast( d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, pDisplayCount, reinterpret_cast( pDisplays ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, Dispatch const &d ) const + { + std::vector displays; + uint32_t displayCount; + Result result; + do + { + result = static_cast( d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && displayCount ) + { + displays.resize( displayCount ); + result = static_cast( d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, reinterpret_cast( displays.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + VULKAN_HPP_ASSERT( displayCount <= displays.size() ); + displays.resize( displayCount ); + return createResultValue( result, displays, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR" ); + } + template + VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, Allocator const& vectorAllocator, Dispatch const &d ) const + { + std::vector displays( vectorAllocator ); + uint32_t displayCount; + Result result; + do + { + result = static_cast( d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && displayCount ) + { + displays.resize( displayCount ); + result = static_cast( d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, reinterpret_cast( displays.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + VULKAN_HPP_ASSERT( displayCount <= displays.size() ); + displays.resize( displayCount ); + return createResultValue( result, displays, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayModePropertiesKHR( DisplayKHR display, uint32_t* pPropertyCount, DisplayModePropertiesKHR* pProperties, Dispatch const &d) const + { + return static_cast( d.vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast( display ), pPropertyCount, reinterpret_cast( pProperties ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + 
VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getDisplayModePropertiesKHR( DisplayKHR display, Dispatch const &d ) const + { + std::vector properties; + uint32_t propertyCount; + Result result; + do + { + result = static_cast( d.vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast( display ), &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast( display ), &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + properties.resize( propertyCount ); + return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayModePropertiesKHR" ); + } + template + VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getDisplayModePropertiesKHR( DisplayKHR display, Allocator const& vectorAllocator, Dispatch const &d ) const + { + std::vector properties( vectorAllocator ); + uint32_t propertyCount; + Result result; + do + { + result = static_cast( d.vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast( display ), &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast( display ), &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + properties.resize( propertyCount ); + return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayModePropertiesKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result PhysicalDevice::createDisplayModeKHR( DisplayKHR display, const DisplayModeCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, DisplayModeKHR* pMode, Dispatch const &d) const + { + return static_cast( d.vkCreateDisplayModeKHR( m_physicalDevice, static_cast( display ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pMode ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type PhysicalDevice::createDisplayModeKHR( DisplayKHR display, const DisplayModeCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const + { + DisplayModeKHR mode; + Result result = static_cast( d.vkCreateDisplayModeKHR( m_physicalDevice, static_cast( display ), reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &mode ) ) ); + return createResultValue( result, mode, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::createDisplayModeKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneCapabilitiesKHR( DisplayModeKHR mode, uint32_t planeIndex, DisplayPlaneCapabilitiesKHR* pCapabilities, Dispatch const &d) const + { + return static_cast( d.vkGetDisplayPlaneCapabilitiesKHR( m_physicalDevice, static_cast( mode ), planeIndex, reinterpret_cast( pCapabilities ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type PhysicalDevice::getDisplayPlaneCapabilitiesKHR( DisplayModeKHR mode, uint32_t planeIndex, Dispatch const &d ) const + { + DisplayPlaneCapabilitiesKHR 
capabilities; + Result result = static_cast( d.vkGetDisplayPlaneCapabilitiesKHR( m_physicalDevice, static_cast( mode ), planeIndex, reinterpret_cast( &capabilities ) ) ); + return createResultValue( result, capabilities, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayPlaneCapabilitiesKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceSupportKHR( uint32_t queueFamilyIndex, SurfaceKHR surface, Bool32* pSupported, Dispatch const &d) const + { + return static_cast( d.vkGetPhysicalDeviceSurfaceSupportKHR( m_physicalDevice, queueFamilyIndex, static_cast( surface ), pSupported ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type PhysicalDevice::getSurfaceSupportKHR( uint32_t queueFamilyIndex, SurfaceKHR surface, Dispatch const &d ) const + { + Bool32 supported; + Result result = static_cast( d.vkGetPhysicalDeviceSurfaceSupportKHR( m_physicalDevice, queueFamilyIndex, static_cast( surface ), &supported ) ); + return createResultValue( result, supported, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfaceSupportKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceCapabilitiesKHR( SurfaceKHR surface, SurfaceCapabilitiesKHR* pSurfaceCapabilities, Dispatch const &d) const + { + return static_cast( d.vkGetPhysicalDeviceSurfaceCapabilitiesKHR( m_physicalDevice, static_cast( surface ), reinterpret_cast( pSurfaceCapabilities ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type PhysicalDevice::getSurfaceCapabilitiesKHR( SurfaceKHR surface, Dispatch const &d ) const + { + SurfaceCapabilitiesKHR surfaceCapabilities; + Result result = static_cast( d.vkGetPhysicalDeviceSurfaceCapabilitiesKHR( m_physicalDevice, static_cast( surface ), reinterpret_cast( &surfaceCapabilities ) ) ); + return createResultValue( result, surfaceCapabilities, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfaceCapabilitiesKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceFormatsKHR( SurfaceKHR surface, uint32_t* pSurfaceFormatCount, SurfaceFormatKHR* pSurfaceFormats, Dispatch const &d) const + { + return static_cast( d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast( surface ), pSurfaceFormatCount, reinterpret_cast( pSurfaceFormats ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getSurfaceFormatsKHR( SurfaceKHR surface, Dispatch const &d ) const + { + std::vector surfaceFormats; + uint32_t surfaceFormatCount; + Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast( surface ), &surfaceFormatCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && surfaceFormatCount ) + { + surfaceFormats.resize( surfaceFormatCount ); + result = static_cast( d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast( surface ), &surfaceFormatCount, reinterpret_cast( surfaceFormats.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() ); + surfaceFormats.resize( surfaceFormatCount ); + return createResultValue( result, surfaceFormats, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfaceFormatsKHR" ); + } + template + VULKAN_HPP_INLINE typename ResultValueType>::type 
PhysicalDevice::getSurfaceFormatsKHR( SurfaceKHR surface, Allocator const& vectorAllocator, Dispatch const &d ) const + { + std::vector surfaceFormats( vectorAllocator ); + uint32_t surfaceFormatCount; + Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast( surface ), &surfaceFormatCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && surfaceFormatCount ) + { + surfaceFormats.resize( surfaceFormatCount ); + result = static_cast( d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast( surface ), &surfaceFormatCount, reinterpret_cast( surfaceFormats.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() ); + surfaceFormats.resize( surfaceFormatCount ); + return createResultValue( result, surfaceFormats, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfaceFormatsKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result PhysicalDevice::getSurfacePresentModesKHR( SurfaceKHR surface, uint32_t* pPresentModeCount, PresentModeKHR* pPresentModes, Dispatch const &d) const + { + return static_cast( d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast( surface ), pPresentModeCount, reinterpret_cast( pPresentModes ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getSurfacePresentModesKHR( SurfaceKHR surface, Dispatch const &d ) const + { + std::vector presentModes; + uint32_t presentModeCount; + Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast( surface ), &presentModeCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && presentModeCount ) + { + presentModes.resize( presentModeCount ); + result = static_cast( d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast( surface ), &presentModeCount, reinterpret_cast( presentModes.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() ); + presentModes.resize( presentModeCount ); + return createResultValue( result, presentModes, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfacePresentModesKHR" ); + } + template + VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getSurfacePresentModesKHR( SurfaceKHR surface, Allocator const& vectorAllocator, Dispatch const &d ) const + { + std::vector presentModes( vectorAllocator ); + uint32_t presentModeCount; + Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast( surface ), &presentModeCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && presentModeCount ) + { + presentModes.resize( presentModeCount ); + result = static_cast( d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast( surface ), &presentModeCount, reinterpret_cast( presentModes.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() ); + presentModes.resize( presentModeCount ); + return createResultValue( result, presentModes, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfacePresentModesKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VK_USE_PLATFORM_WAYLAND_KHR + template + VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, struct 
wl_display* display, Dispatch const &d) const + { + return d.vkGetPhysicalDeviceWaylandPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, display ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, struct wl_display & display, Dispatch const &d ) const + { + return d.vkGetPhysicalDeviceWaylandPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &display ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + +#ifdef VK_USE_PLATFORM_WIN32_KHR +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWin32PresentationSupportKHR( uint32_t queueFamilyIndex, Dispatch const &d) const + { + return d.vkGetPhysicalDeviceWin32PresentationSupportKHR( m_physicalDevice, queueFamilyIndex ); + } +#else + template + VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWin32PresentationSupportKHR( uint32_t queueFamilyIndex, Dispatch const &d ) const + { + return d.vkGetPhysicalDeviceWin32PresentationSupportKHR( m_physicalDevice, queueFamilyIndex ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#ifdef VK_USE_PLATFORM_XLIB_KHR + template + VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display* dpy, VisualID visualID, Dispatch const &d) const + { + return d.vkGetPhysicalDeviceXlibPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, dpy, visualID ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display & dpy, VisualID visualID, Dispatch const &d ) const + { + return d.vkGetPhysicalDeviceXlibPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &dpy, visualID ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ + +#ifdef VK_USE_PLATFORM_XCB_KHR + template + VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, xcb_connection_t* connection, xcb_visualid_t visual_id, Dispatch const &d) const + { + return d.vkGetPhysicalDeviceXcbPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, connection, visual_id ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, xcb_connection_t & connection, xcb_visualid_t visual_id, Dispatch const &d ) const + { + return d.vkGetPhysicalDeviceXcbPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &connection, visual_id ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_XCB_KHR*/ + + template + VULKAN_HPP_INLINE Result PhysicalDevice::getExternalImageFormatPropertiesNV( Format format, ImageType type, ImageTiling tiling, ImageUsageFlags usage, ImageCreateFlags flags, ExternalMemoryHandleTypeFlagsNV externalHandleType, ExternalImageFormatPropertiesNV* pExternalImageFormatProperties, Dispatch const &d) const + { + return static_cast( d.vkGetPhysicalDeviceExternalImageFormatPropertiesNV( m_physicalDevice, static_cast( format ), static_cast( type ), static_cast( tiling ), static_cast( usage ), static_cast( flags ), static_cast( externalHandleType ), reinterpret_cast( pExternalImageFormatProperties ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type PhysicalDevice::getExternalImageFormatPropertiesNV( 
Format format, ImageType type, ImageTiling tiling, ImageUsageFlags usage, ImageCreateFlags flags, ExternalMemoryHandleTypeFlagsNV externalHandleType, Dispatch const &d ) const + { + ExternalImageFormatPropertiesNV externalImageFormatProperties; + Result result = static_cast( d.vkGetPhysicalDeviceExternalImageFormatPropertiesNV( m_physicalDevice, static_cast( format ), static_cast( type ), static_cast( tiling ), static_cast( usage ), static_cast( flags ), static_cast( externalHandleType ), reinterpret_cast( &externalImageFormatProperties ) ) ); + return createResultValue( result, externalImageFormatProperties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getExternalImageFormatPropertiesNV" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void PhysicalDevice::getGeneratedCommandsPropertiesNVX( DeviceGeneratedCommandsFeaturesNVX* pFeatures, DeviceGeneratedCommandsLimitsNVX* pLimits, Dispatch const &d) const + { + d.vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX( m_physicalDevice, reinterpret_cast( pFeatures ), reinterpret_cast( pLimits ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE DeviceGeneratedCommandsLimitsNVX PhysicalDevice::getGeneratedCommandsPropertiesNVX( DeviceGeneratedCommandsFeaturesNVX & features, Dispatch const &d ) const + { + DeviceGeneratedCommandsLimitsNVX limits; + d.vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX( m_physicalDevice, reinterpret_cast( &features ), reinterpret_cast( &limits ) ); + return limits; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void PhysicalDevice::getFeatures2( PhysicalDeviceFeatures2* pFeatures, Dispatch const &d) const + { + d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast( pFeatures ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE PhysicalDeviceFeatures2 PhysicalDevice::getFeatures2(Dispatch const &d ) const + { + PhysicalDeviceFeatures2 features; + d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast( &features ) ); + return features; + } + template + VULKAN_HPP_INLINE StructureChain PhysicalDevice::getFeatures2(Dispatch const &d ) const + { + StructureChain structureChain; + PhysicalDeviceFeatures2& features = structureChain.template get(); + d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast( &features ) ); + return structureChain; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void PhysicalDevice::getFeatures2KHR( PhysicalDeviceFeatures2* pFeatures, Dispatch const &d) const + { + d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast( pFeatures ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE PhysicalDeviceFeatures2 PhysicalDevice::getFeatures2KHR(Dispatch const &d ) const + { + PhysicalDeviceFeatures2 features; + d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast( &features ) ); + return features; + } + template + VULKAN_HPP_INLINE StructureChain PhysicalDevice::getFeatures2KHR(Dispatch const &d ) const + { + StructureChain structureChain; + PhysicalDeviceFeatures2& features = structureChain.template get(); + d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast( &features ) ); + return structureChain; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void PhysicalDevice::getProperties2( PhysicalDeviceProperties2* pProperties, Dispatch const &d) const + { + d.vkGetPhysicalDeviceProperties2( 
m_physicalDevice, reinterpret_cast( pProperties ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE PhysicalDeviceProperties2 PhysicalDevice::getProperties2(Dispatch const &d ) const + { + PhysicalDeviceProperties2 properties; + d.vkGetPhysicalDeviceProperties2( m_physicalDevice, reinterpret_cast( &properties ) ); + return properties; + } + template + VULKAN_HPP_INLINE StructureChain PhysicalDevice::getProperties2(Dispatch const &d ) const + { + StructureChain structureChain; + PhysicalDeviceProperties2& properties = structureChain.template get(); + d.vkGetPhysicalDeviceProperties2( m_physicalDevice, reinterpret_cast( &properties ) ); + return structureChain; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void PhysicalDevice::getProperties2KHR( PhysicalDeviceProperties2* pProperties, Dispatch const &d) const + { + d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, reinterpret_cast( pProperties ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE PhysicalDeviceProperties2 PhysicalDevice::getProperties2KHR(Dispatch const &d ) const + { + PhysicalDeviceProperties2 properties; + d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, reinterpret_cast( &properties ) ); + return properties; + } + template + VULKAN_HPP_INLINE StructureChain PhysicalDevice::getProperties2KHR(Dispatch const &d ) const + { + StructureChain structureChain; + PhysicalDeviceProperties2& properties = structureChain.template get(); + d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, reinterpret_cast( &properties ) ); + return structureChain; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void PhysicalDevice::getFormatProperties2( Format format, FormatProperties2* pFormatProperties, Dispatch const &d) const + { + d.vkGetPhysicalDeviceFormatProperties2( m_physicalDevice, static_cast( format ), reinterpret_cast( pFormatProperties ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE FormatProperties2 PhysicalDevice::getFormatProperties2( Format format, Dispatch const &d ) const + { + FormatProperties2 formatProperties; + d.vkGetPhysicalDeviceFormatProperties2( m_physicalDevice, static_cast( format ), reinterpret_cast( &formatProperties ) ); + return formatProperties; + } + template + VULKAN_HPP_INLINE StructureChain PhysicalDevice::getFormatProperties2( Format format, Dispatch const &d ) const + { + StructureChain structureChain; + FormatProperties2& formatProperties = structureChain.template get(); + d.vkGetPhysicalDeviceFormatProperties2( m_physicalDevice, static_cast( format ), reinterpret_cast( &formatProperties ) ); + return structureChain; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void PhysicalDevice::getFormatProperties2KHR( Format format, FormatProperties2* pFormatProperties, Dispatch const &d) const + { + d.vkGetPhysicalDeviceFormatProperties2KHR( m_physicalDevice, static_cast( format ), reinterpret_cast( pFormatProperties ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE FormatProperties2 PhysicalDevice::getFormatProperties2KHR( Format format, Dispatch const &d ) const + { + FormatProperties2 formatProperties; + d.vkGetPhysicalDeviceFormatProperties2KHR( m_physicalDevice, static_cast( format ), reinterpret_cast( &formatProperties ) ); + return formatProperties; + } + template + VULKAN_HPP_INLINE StructureChain PhysicalDevice::getFormatProperties2KHR( Format format, Dispatch const 
&d ) const + { + StructureChain structureChain; + FormatProperties2& formatProperties = structureChain.template get(); + d.vkGetPhysicalDeviceFormatProperties2KHR( m_physicalDevice, static_cast( format ), reinterpret_cast( &formatProperties ) ); + return structureChain; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result PhysicalDevice::getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2* pImageFormatInfo, ImageFormatProperties2* pImageFormatProperties, Dispatch const &d) const + { + return static_cast( d.vkGetPhysicalDeviceImageFormatProperties2( m_physicalDevice, reinterpret_cast( pImageFormatInfo ), reinterpret_cast( pImageFormatProperties ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type PhysicalDevice::getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d ) const + { + ImageFormatProperties2 imageFormatProperties; + Result result = static_cast( d.vkGetPhysicalDeviceImageFormatProperties2( m_physicalDevice, reinterpret_cast( &imageFormatInfo ), reinterpret_cast( &imageFormatProperties ) ) ); + return createResultValue( result, imageFormatProperties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getImageFormatProperties2" ); + } + template + VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d ) const + { + StructureChain structureChain; + ImageFormatProperties2& imageFormatProperties = structureChain.template get(); + Result result = static_cast( d.vkGetPhysicalDeviceImageFormatProperties2( m_physicalDevice, reinterpret_cast( &imageFormatInfo ), reinterpret_cast( &imageFormatProperties ) ) ); + return createResultValue( result, structureChain, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getImageFormatProperties2" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result PhysicalDevice::getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2* pImageFormatInfo, ImageFormatProperties2* pImageFormatProperties, Dispatch const &d) const + { + return static_cast( d.vkGetPhysicalDeviceImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast( pImageFormatInfo ), reinterpret_cast( pImageFormatProperties ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type PhysicalDevice::getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d ) const + { + ImageFormatProperties2 imageFormatProperties; + Result result = static_cast( d.vkGetPhysicalDeviceImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast( &imageFormatInfo ), reinterpret_cast( &imageFormatProperties ) ) ); + return createResultValue( result, imageFormatProperties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getImageFormatProperties2KHR" ); + } + template + VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d ) const + { + StructureChain structureChain; + ImageFormatProperties2& imageFormatProperties = structureChain.template get(); + Result result = static_cast( d.vkGetPhysicalDeviceImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast( &imageFormatInfo ), reinterpret_cast( &imageFormatProperties ) ) ); + return createResultValue( result, structureChain, 
VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getImageFormatProperties2KHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyProperties2( uint32_t* pQueueFamilyPropertyCount, QueueFamilyProperties2* pQueueFamilyProperties, Dispatch const &d) const + { + d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, pQueueFamilyPropertyCount, reinterpret_cast( pQueueFamilyProperties ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE std::vector PhysicalDevice::getQueueFamilyProperties2(Dispatch const &d ) const + { + std::vector queueFamilyProperties; + uint32_t queueFamilyPropertyCount; + d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); + queueFamilyProperties.resize( queueFamilyPropertyCount ); + d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast( queueFamilyProperties.data() ) ); + return queueFamilyProperties; + } + template + VULKAN_HPP_INLINE std::vector PhysicalDevice::getQueueFamilyProperties2(Allocator const& vectorAllocator, Dispatch const &d ) const + { + std::vector queueFamilyProperties( vectorAllocator ); + uint32_t queueFamilyPropertyCount; + d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); + queueFamilyProperties.resize( queueFamilyPropertyCount ); + d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast( queueFamilyProperties.data() ) ); + return queueFamilyProperties; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyProperties2KHR( uint32_t* pQueueFamilyPropertyCount, QueueFamilyProperties2* pQueueFamilyProperties, Dispatch const &d) const + { + d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, pQueueFamilyPropertyCount, reinterpret_cast( pQueueFamilyProperties ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE std::vector PhysicalDevice::getQueueFamilyProperties2KHR(Dispatch const &d ) const + { + std::vector queueFamilyProperties; + uint32_t queueFamilyPropertyCount; + d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); + queueFamilyProperties.resize( queueFamilyPropertyCount ); + d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast( queueFamilyProperties.data() ) ); + return queueFamilyProperties; + } + template + VULKAN_HPP_INLINE std::vector PhysicalDevice::getQueueFamilyProperties2KHR(Allocator const& vectorAllocator, Dispatch const &d ) const + { + std::vector queueFamilyProperties( vectorAllocator ); + uint32_t queueFamilyPropertyCount; + d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); + queueFamilyProperties.resize( queueFamilyPropertyCount ); + d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast( queueFamilyProperties.data() ) ); + return queueFamilyProperties; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void PhysicalDevice::getMemoryProperties2( PhysicalDeviceMemoryProperties2* pMemoryProperties, Dispatch const &d) const + { + d.vkGetPhysicalDeviceMemoryProperties2( m_physicalDevice, reinterpret_cast( pMemoryProperties ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + 
VULKAN_HPP_INLINE PhysicalDeviceMemoryProperties2 PhysicalDevice::getMemoryProperties2(Dispatch const &d ) const + { + PhysicalDeviceMemoryProperties2 memoryProperties; + d.vkGetPhysicalDeviceMemoryProperties2( m_physicalDevice, reinterpret_cast( &memoryProperties ) ); + return memoryProperties; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void PhysicalDevice::getMemoryProperties2KHR( PhysicalDeviceMemoryProperties2* pMemoryProperties, Dispatch const &d) const + { + d.vkGetPhysicalDeviceMemoryProperties2KHR( m_physicalDevice, reinterpret_cast( pMemoryProperties ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE PhysicalDeviceMemoryProperties2 PhysicalDevice::getMemoryProperties2KHR(Dispatch const &d ) const + { + PhysicalDeviceMemoryProperties2 memoryProperties; + d.vkGetPhysicalDeviceMemoryProperties2KHR( m_physicalDevice, reinterpret_cast( &memoryProperties ) ); + return memoryProperties; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties2( const PhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, SparseImageFormatProperties2* pProperties, Dispatch const &d) const + { + d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice, reinterpret_cast( pFormatInfo ), pPropertyCount, reinterpret_cast( pProperties ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE std::vector PhysicalDevice::getSparseImageFormatProperties2( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Dispatch const &d ) const + { + std::vector properties; + uint32_t propertyCount; + d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice, reinterpret_cast( &formatInfo ), &propertyCount, nullptr ); + properties.resize( propertyCount ); + d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice, reinterpret_cast( &formatInfo ), &propertyCount, reinterpret_cast( properties.data() ) ); + return properties; + } + template + VULKAN_HPP_INLINE std::vector PhysicalDevice::getSparseImageFormatProperties2( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Allocator const& vectorAllocator, Dispatch const &d ) const + { + std::vector properties( vectorAllocator ); + uint32_t propertyCount; + d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice, reinterpret_cast( &formatInfo ), &propertyCount, nullptr ); + properties.resize( propertyCount ); + d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice, reinterpret_cast( &formatInfo ), &propertyCount, reinterpret_cast( properties.data() ) ); + return properties; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties2KHR( const PhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, SparseImageFormatProperties2* pProperties, Dispatch const &d) const + { + d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast( pFormatInfo ), pPropertyCount, reinterpret_cast( pProperties ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE std::vector PhysicalDevice::getSparseImageFormatProperties2KHR( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Dispatch const &d ) const + { + std::vector properties; + uint32_t propertyCount; + d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast( &formatInfo ), &propertyCount, nullptr ); + 
properties.resize( propertyCount ); + d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast( &formatInfo ), &propertyCount, reinterpret_cast( properties.data() ) ); + return properties; + } + template + VULKAN_HPP_INLINE std::vector PhysicalDevice::getSparseImageFormatProperties2KHR( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Allocator const& vectorAllocator, Dispatch const &d ) const + { + std::vector properties( vectorAllocator ); + uint32_t propertyCount; + d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast( &formatInfo ), &propertyCount, nullptr ); + properties.resize( propertyCount ); + d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast( &formatInfo ), &propertyCount, reinterpret_cast( properties.data() ) ); + return properties; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void PhysicalDevice::getExternalBufferProperties( const PhysicalDeviceExternalBufferInfo* pExternalBufferInfo, ExternalBufferProperties* pExternalBufferProperties, Dispatch const &d) const + { + d.vkGetPhysicalDeviceExternalBufferProperties( m_physicalDevice, reinterpret_cast( pExternalBufferInfo ), reinterpret_cast( pExternalBufferProperties ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ExternalBufferProperties PhysicalDevice::getExternalBufferProperties( const PhysicalDeviceExternalBufferInfo & externalBufferInfo, Dispatch const &d ) const + { + ExternalBufferProperties externalBufferProperties; + d.vkGetPhysicalDeviceExternalBufferProperties( m_physicalDevice, reinterpret_cast( &externalBufferInfo ), reinterpret_cast( &externalBufferProperties ) ); + return externalBufferProperties; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void PhysicalDevice::getExternalBufferPropertiesKHR( const PhysicalDeviceExternalBufferInfo* pExternalBufferInfo, ExternalBufferProperties* pExternalBufferProperties, Dispatch const &d) const + { + d.vkGetPhysicalDeviceExternalBufferPropertiesKHR( m_physicalDevice, reinterpret_cast( pExternalBufferInfo ), reinterpret_cast( pExternalBufferProperties ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ExternalBufferProperties PhysicalDevice::getExternalBufferPropertiesKHR( const PhysicalDeviceExternalBufferInfo & externalBufferInfo, Dispatch const &d ) const + { + ExternalBufferProperties externalBufferProperties; + d.vkGetPhysicalDeviceExternalBufferPropertiesKHR( m_physicalDevice, reinterpret_cast( &externalBufferInfo ), reinterpret_cast( &externalBufferProperties ) ); + return externalBufferProperties; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void PhysicalDevice::getExternalSemaphoreProperties( const PhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, ExternalSemaphoreProperties* pExternalSemaphoreProperties, Dispatch const &d) const + { + d.vkGetPhysicalDeviceExternalSemaphoreProperties( m_physicalDevice, reinterpret_cast( pExternalSemaphoreInfo ), reinterpret_cast( pExternalSemaphoreProperties ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ExternalSemaphoreProperties PhysicalDevice::getExternalSemaphoreProperties( const PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, Dispatch const &d ) const + { + ExternalSemaphoreProperties externalSemaphoreProperties; + d.vkGetPhysicalDeviceExternalSemaphoreProperties( m_physicalDevice, 
reinterpret_cast( &externalSemaphoreInfo ), reinterpret_cast( &externalSemaphoreProperties ) ); + return externalSemaphoreProperties; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void PhysicalDevice::getExternalSemaphorePropertiesKHR( const PhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, ExternalSemaphoreProperties* pExternalSemaphoreProperties, Dispatch const &d) const + { + d.vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( m_physicalDevice, reinterpret_cast( pExternalSemaphoreInfo ), reinterpret_cast( pExternalSemaphoreProperties ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ExternalSemaphoreProperties PhysicalDevice::getExternalSemaphorePropertiesKHR( const PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, Dispatch const &d ) const + { + ExternalSemaphoreProperties externalSemaphoreProperties; + d.vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( m_physicalDevice, reinterpret_cast( &externalSemaphoreInfo ), reinterpret_cast( &externalSemaphoreProperties ) ); + return externalSemaphoreProperties; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void PhysicalDevice::getExternalFenceProperties( const PhysicalDeviceExternalFenceInfo* pExternalFenceInfo, ExternalFenceProperties* pExternalFenceProperties, Dispatch const &d) const + { + d.vkGetPhysicalDeviceExternalFenceProperties( m_physicalDevice, reinterpret_cast( pExternalFenceInfo ), reinterpret_cast( pExternalFenceProperties ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ExternalFenceProperties PhysicalDevice::getExternalFenceProperties( const PhysicalDeviceExternalFenceInfo & externalFenceInfo, Dispatch const &d ) const + { + ExternalFenceProperties externalFenceProperties; + d.vkGetPhysicalDeviceExternalFenceProperties( m_physicalDevice, reinterpret_cast( &externalFenceInfo ), reinterpret_cast( &externalFenceProperties ) ); + return externalFenceProperties; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void PhysicalDevice::getExternalFencePropertiesKHR( const PhysicalDeviceExternalFenceInfo* pExternalFenceInfo, ExternalFenceProperties* pExternalFenceProperties, Dispatch const &d) const + { + d.vkGetPhysicalDeviceExternalFencePropertiesKHR( m_physicalDevice, reinterpret_cast( pExternalFenceInfo ), reinterpret_cast( pExternalFenceProperties ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ExternalFenceProperties PhysicalDevice::getExternalFencePropertiesKHR( const PhysicalDeviceExternalFenceInfo & externalFenceInfo, Dispatch const &d ) const + { + ExternalFenceProperties externalFenceProperties; + d.vkGetPhysicalDeviceExternalFencePropertiesKHR( m_physicalDevice, reinterpret_cast( &externalFenceInfo ), reinterpret_cast( &externalFenceProperties ) ); + return externalFenceProperties; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE Result PhysicalDevice::releaseDisplayEXT( DisplayKHR display, Dispatch const &d) const + { + return static_cast( d.vkReleaseDisplayEXT( m_physicalDevice, static_cast( display ) ) ); + } +#else + template + VULKAN_HPP_INLINE ResultValueType::type PhysicalDevice::releaseDisplayEXT( DisplayKHR display, Dispatch const &d ) const + { + Result result = static_cast( d.vkReleaseDisplayEXT( m_physicalDevice, static_cast( display ) ) ); + return createResultValue( result, 
VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::releaseDisplayEXT" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VK_USE_PLATFORM_XLIB_XRANDR_NV + template + VULKAN_HPP_INLINE Result PhysicalDevice::acquireXlibDisplayEXT( Display* dpy, DisplayKHR display, Dispatch const &d) const + { + return static_cast( d.vkAcquireXlibDisplayEXT( m_physicalDevice, dpy, static_cast( display ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type PhysicalDevice::acquireXlibDisplayEXT( DisplayKHR display, Dispatch const &d ) const + { + Display dpy; + Result result = static_cast( d.vkAcquireXlibDisplayEXT( m_physicalDevice, &dpy, static_cast( display ) ) ); + return createResultValue( result, dpy, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::acquireXlibDisplayEXT" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_XLIB_XRANDR_NV*/ + +#ifdef VK_USE_PLATFORM_XLIB_XRANDR_NV + template + VULKAN_HPP_INLINE Result PhysicalDevice::getRandROutputDisplayEXT( Display* dpy, RROutput rrOutput, DisplayKHR* pDisplay, Dispatch const &d) const + { + return static_cast( d.vkGetRandROutputDisplayEXT( m_physicalDevice, dpy, rrOutput, reinterpret_cast( pDisplay ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type PhysicalDevice::getRandROutputDisplayEXT( Display & dpy, RROutput rrOutput, Dispatch const &d ) const + { + DisplayKHR display; + Result result = static_cast( d.vkGetRandROutputDisplayEXT( m_physicalDevice, &dpy, rrOutput, reinterpret_cast( &display ) ) ); + return createResultValue( result, display, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getRandROutputDisplayEXT" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_XLIB_XRANDR_NV*/ + + template + VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceCapabilities2EXT( SurfaceKHR surface, SurfaceCapabilities2EXT* pSurfaceCapabilities, Dispatch const &d) const + { + return static_cast( d.vkGetPhysicalDeviceSurfaceCapabilities2EXT( m_physicalDevice, static_cast( surface ), reinterpret_cast( pSurfaceCapabilities ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type PhysicalDevice::getSurfaceCapabilities2EXT( SurfaceKHR surface, Dispatch const &d ) const + { + SurfaceCapabilities2EXT surfaceCapabilities; + Result result = static_cast( d.vkGetPhysicalDeviceSurfaceCapabilities2EXT( m_physicalDevice, static_cast( surface ), reinterpret_cast( &surfaceCapabilities ) ) ); + return createResultValue( result, surfaceCapabilities, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfaceCapabilities2EXT" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result PhysicalDevice::getPresentRectanglesKHR( SurfaceKHR surface, uint32_t* pRectCount, Rect2D* pRects, Dispatch const &d) const + { + return static_cast( d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast( surface ), pRectCount, reinterpret_cast( pRects ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getPresentRectanglesKHR( SurfaceKHR surface, Dispatch const &d ) const + { + std::vector rects; + uint32_t rectCount; + Result result; + do + { + result = static_cast( d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast( surface ), &rectCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && rectCount ) + { + rects.resize( rectCount ); + result = 
static_cast( d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast( surface ), &rectCount, reinterpret_cast( rects.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + VULKAN_HPP_ASSERT( rectCount <= rects.size() ); + rects.resize( rectCount ); + return createResultValue( result, rects, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getPresentRectanglesKHR" ); + } + template + VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getPresentRectanglesKHR( SurfaceKHR surface, Allocator const& vectorAllocator, Dispatch const &d ) const + { + std::vector rects( vectorAllocator ); + uint32_t rectCount; + Result result; + do + { + result = static_cast( d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast( surface ), &rectCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && rectCount ) + { + rects.resize( rectCount ); + result = static_cast( d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast( surface ), &rectCount, reinterpret_cast( rects.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + VULKAN_HPP_ASSERT( rectCount <= rects.size() ); + rects.resize( rectCount ); + return createResultValue( result, rects, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getPresentRectanglesKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void PhysicalDevice::getMultisamplePropertiesEXT( SampleCountFlagBits samples, MultisamplePropertiesEXT* pMultisampleProperties, Dispatch const &d) const + { + d.vkGetPhysicalDeviceMultisamplePropertiesEXT( m_physicalDevice, static_cast( samples ), reinterpret_cast( pMultisampleProperties ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE MultisamplePropertiesEXT PhysicalDevice::getMultisamplePropertiesEXT( SampleCountFlagBits samples, Dispatch const &d ) const + { + MultisamplePropertiesEXT multisampleProperties; + d.vkGetPhysicalDeviceMultisamplePropertiesEXT( m_physicalDevice, static_cast( samples ), reinterpret_cast( &multisampleProperties ) ); + return multisampleProperties; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, SurfaceCapabilities2KHR* pSurfaceCapabilities, Dispatch const &d) const + { + return static_cast( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( m_physicalDevice, reinterpret_cast( pSurfaceInfo ), reinterpret_cast( pSurfaceCapabilities ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type PhysicalDevice::getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d ) const + { + SurfaceCapabilities2KHR surfaceCapabilities; + Result result = static_cast( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( m_physicalDevice, reinterpret_cast( &surfaceInfo ), reinterpret_cast( &surfaceCapabilities ) ) ); + return createResultValue( result, surfaceCapabilities, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfaceCapabilities2KHR" ); + } + template + VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d ) const + { + StructureChain structureChain; + SurfaceCapabilities2KHR& surfaceCapabilities = structureChain.template get(); + Result result = static_cast( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( m_physicalDevice, reinterpret_cast( &surfaceInfo ), 
reinterpret_cast( &surfaceCapabilities ) ) ); + return createResultValue( result, structureChain, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfaceCapabilities2KHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceFormats2KHR( const PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pSurfaceFormatCount, SurfaceFormat2KHR* pSurfaceFormats, Dispatch const &d) const + { + return static_cast( d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, reinterpret_cast( pSurfaceInfo ), pSurfaceFormatCount, reinterpret_cast( pSurfaceFormats ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getSurfaceFormats2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d ) const + { + std::vector surfaceFormats; + uint32_t surfaceFormatCount; + Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, reinterpret_cast( &surfaceInfo ), &surfaceFormatCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && surfaceFormatCount ) + { + surfaceFormats.resize( surfaceFormatCount ); + result = static_cast( d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, reinterpret_cast( &surfaceInfo ), &surfaceFormatCount, reinterpret_cast( surfaceFormats.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() ); + surfaceFormats.resize( surfaceFormatCount ); + return createResultValue( result, surfaceFormats, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfaceFormats2KHR" ); + } + template + VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getSurfaceFormats2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Allocator const& vectorAllocator, Dispatch const &d ) const + { + std::vector surfaceFormats( vectorAllocator ); + uint32_t surfaceFormatCount; + Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, reinterpret_cast( &surfaceInfo ), &surfaceFormatCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && surfaceFormatCount ) + { + surfaceFormats.resize( surfaceFormatCount ); + result = static_cast( d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, reinterpret_cast( &surfaceInfo ), &surfaceFormatCount, reinterpret_cast( surfaceFormats.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() ); + surfaceFormats.resize( surfaceFormatCount ); + return createResultValue( result, surfaceFormats, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfaceFormats2KHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayProperties2KHR( uint32_t* pPropertyCount, DisplayProperties2KHR* pProperties, Dispatch const &d) const + { + return static_cast( d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, pPropertyCount, reinterpret_cast( pProperties ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getDisplayProperties2KHR(Dispatch const &d ) const + { + std::vector properties; + uint32_t propertyCount; + Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + 
properties.resize( propertyCount ); + result = static_cast( d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + properties.resize( propertyCount ); + return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayProperties2KHR" ); + } + template + VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getDisplayProperties2KHR(Allocator const& vectorAllocator, Dispatch const &d ) const + { + std::vector properties( vectorAllocator ); + uint32_t propertyCount; + Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + properties.resize( propertyCount ); + return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayProperties2KHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneProperties2KHR( uint32_t* pPropertyCount, DisplayPlaneProperties2KHR* pProperties, Dispatch const &d) const + { + return static_cast( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, pPropertyCount, reinterpret_cast( pProperties ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getDisplayPlaneProperties2KHR(Dispatch const &d ) const + { + std::vector properties; + uint32_t propertyCount; + Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + properties.resize( propertyCount ); + return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayPlaneProperties2KHR" ); + } + template + VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getDisplayPlaneProperties2KHR(Allocator const& vectorAllocator, Dispatch const &d ) const + { + std::vector properties( vectorAllocator ); + uint32_t propertyCount; + Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + properties.resize( propertyCount ); + return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayPlaneProperties2KHR" ); + } 
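+  // Illustrative usage sketch only (not part of the generated header): the
+  // enhanced-mode wrappers above hide Vulkan's two-call enumeration idiom,
+  // i.e. query the count, size the vector, fetch, and retry on eIncomplete.
+  // The sketch assumes a valid vk::PhysicalDevice named `gpu` whose instance
+  // enabled VK_KHR_get_display_properties2:
+  //
+  //   // enhanced mode: the count/resize/retry loop runs inside the wrapper
+  //   std::vector<vk::DisplayProperties2KHR> props = gpu.getDisplayProperties2KHR();
+  //
+  //   // equivalent raw two-call pattern using the pointer overload
+  //   uint32_t count = 0;
+  //   gpu.getDisplayProperties2KHR( &count, nullptr );
+  //   std::vector<vk::DisplayProperties2KHR> raw( count );
+  //   gpu.getDisplayProperties2KHR( &count, raw.data() );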
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayModeProperties2KHR( DisplayKHR display, uint32_t* pPropertyCount, DisplayModeProperties2KHR* pProperties, Dispatch const &d) const + { + return static_cast( d.vkGetDisplayModeProperties2KHR( m_physicalDevice, static_cast( display ), pPropertyCount, reinterpret_cast( pProperties ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getDisplayModeProperties2KHR( DisplayKHR display, Dispatch const &d ) const + { + std::vector properties; + uint32_t propertyCount; + Result result; + do + { + result = static_cast( d.vkGetDisplayModeProperties2KHR( m_physicalDevice, static_cast( display ), &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkGetDisplayModeProperties2KHR( m_physicalDevice, static_cast( display ), &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + properties.resize( propertyCount ); + return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayModeProperties2KHR" ); + } + template + VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getDisplayModeProperties2KHR( DisplayKHR display, Allocator const& vectorAllocator, Dispatch const &d ) const + { + std::vector properties( vectorAllocator ); + uint32_t propertyCount; + Result result; + do + { + result = static_cast( d.vkGetDisplayModeProperties2KHR( m_physicalDevice, static_cast( display ), &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkGetDisplayModeProperties2KHR( m_physicalDevice, static_cast( display ), &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + properties.resize( propertyCount ); + return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayModeProperties2KHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneCapabilities2KHR( const DisplayPlaneInfo2KHR* pDisplayPlaneInfo, DisplayPlaneCapabilities2KHR* pCapabilities, Dispatch const &d) const + { + return static_cast( d.vkGetDisplayPlaneCapabilities2KHR( m_physicalDevice, reinterpret_cast( pDisplayPlaneInfo ), reinterpret_cast( pCapabilities ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type PhysicalDevice::getDisplayPlaneCapabilities2KHR( const DisplayPlaneInfo2KHR & displayPlaneInfo, Dispatch const &d ) const + { + DisplayPlaneCapabilities2KHR capabilities; + Result result = static_cast( d.vkGetDisplayPlaneCapabilities2KHR( m_physicalDevice, reinterpret_cast( &displayPlaneInfo ), reinterpret_cast( &capabilities ) ) ); + return createResultValue( result, capabilities, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayPlaneCapabilities2KHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result PhysicalDevice::getCalibrateableTimeDomainsEXT( uint32_t* pTimeDomainCount, TimeDomainEXT* pTimeDomains, Dispatch const &d) const + { + return static_cast( 
d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, pTimeDomainCount, reinterpret_cast( pTimeDomains ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getCalibrateableTimeDomainsEXT(Dispatch const &d ) const + { + std::vector timeDomains; + uint32_t timeDomainCount; + Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && timeDomainCount ) + { + timeDomains.resize( timeDomainCount ); + result = static_cast( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, reinterpret_cast( timeDomains.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() ); + timeDomains.resize( timeDomainCount ); + return createResultValue( result, timeDomains, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getCalibrateableTimeDomainsEXT" ); + } + template + VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getCalibrateableTimeDomainsEXT(Allocator const& vectorAllocator, Dispatch const &d ) const + { + std::vector timeDomains( vectorAllocator ); + uint32_t timeDomainCount; + Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && timeDomainCount ) + { + timeDomains.resize( timeDomainCount ); + result = static_cast( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, reinterpret_cast( timeDomains.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() ); + timeDomains.resize( timeDomainCount ); + return createResultValue( result, timeDomains, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getCalibrateableTimeDomainsEXT" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + struct CmdProcessCommandsInfoNVX + { + CmdProcessCommandsInfoNVX( ObjectTableNVX objectTable_ = ObjectTableNVX(), + IndirectCommandsLayoutNVX indirectCommandsLayout_ = IndirectCommandsLayoutNVX(), + uint32_t indirectCommandsTokenCount_ = 0, + const IndirectCommandsTokenNVX* pIndirectCommandsTokens_ = nullptr, + uint32_t maxSequencesCount_ = 0, + CommandBuffer targetCommandBuffer_ = CommandBuffer(), + Buffer sequencesCountBuffer_ = Buffer(), + DeviceSize sequencesCountOffset_ = 0, + Buffer sequencesIndexBuffer_ = Buffer(), + DeviceSize sequencesIndexOffset_ = 0 ) + : objectTable( objectTable_ ) + , indirectCommandsLayout( indirectCommandsLayout_ ) + , indirectCommandsTokenCount( indirectCommandsTokenCount_ ) + , pIndirectCommandsTokens( pIndirectCommandsTokens_ ) + , maxSequencesCount( maxSequencesCount_ ) + , targetCommandBuffer( targetCommandBuffer_ ) + , sequencesCountBuffer( sequencesCountBuffer_ ) + , sequencesCountOffset( sequencesCountOffset_ ) + , sequencesIndexBuffer( sequencesIndexBuffer_ ) + , sequencesIndexOffset( sequencesIndexOffset_ ) + { + } + + CmdProcessCommandsInfoNVX( VkCmdProcessCommandsInfoNVX const & rhs ) + { + memcpy( this, &rhs, sizeof( CmdProcessCommandsInfoNVX ) ); + } + + CmdProcessCommandsInfoNVX& operator=( VkCmdProcessCommandsInfoNVX const & rhs ) + { + memcpy( this, &rhs, sizeof( CmdProcessCommandsInfoNVX ) ); + return *this; + } + CmdProcessCommandsInfoNVX& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + 
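+    // The set*() members of this struct each overwrite one field and return
+    // *this, so a CmdProcessCommandsInfoNVX can be populated with a chained,
+    // builder-style sequence of setter calls before the structure is passed
+    // to the vkCmdProcessCommandsNVX entry point.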
CmdProcessCommandsInfoNVX& setObjectTable( ObjectTableNVX objectTable_ ) + { + objectTable = objectTable_; + return *this; + } + + CmdProcessCommandsInfoNVX& setIndirectCommandsLayout( IndirectCommandsLayoutNVX indirectCommandsLayout_ ) + { + indirectCommandsLayout = indirectCommandsLayout_; + return *this; + } + + CmdProcessCommandsInfoNVX& setIndirectCommandsTokenCount( uint32_t indirectCommandsTokenCount_ ) + { + indirectCommandsTokenCount = indirectCommandsTokenCount_; + return *this; + } + + CmdProcessCommandsInfoNVX& setPIndirectCommandsTokens( const IndirectCommandsTokenNVX* pIndirectCommandsTokens_ ) + { + pIndirectCommandsTokens = pIndirectCommandsTokens_; + return *this; + } + + CmdProcessCommandsInfoNVX& setMaxSequencesCount( uint32_t maxSequencesCount_ ) + { + maxSequencesCount = maxSequencesCount_; + return *this; + } + + CmdProcessCommandsInfoNVX& setTargetCommandBuffer( CommandBuffer targetCommandBuffer_ ) + { + targetCommandBuffer = targetCommandBuffer_; + return *this; + } + + CmdProcessCommandsInfoNVX& setSequencesCountBuffer( Buffer sequencesCountBuffer_ ) + { + sequencesCountBuffer = sequencesCountBuffer_; + return *this; + } + + CmdProcessCommandsInfoNVX& setSequencesCountOffset( DeviceSize sequencesCountOffset_ ) + { + sequencesCountOffset = sequencesCountOffset_; + return *this; + } + + CmdProcessCommandsInfoNVX& setSequencesIndexBuffer( Buffer sequencesIndexBuffer_ ) + { + sequencesIndexBuffer = sequencesIndexBuffer_; + return *this; + } + + CmdProcessCommandsInfoNVX& setSequencesIndexOffset( DeviceSize sequencesIndexOffset_ ) + { + sequencesIndexOffset = sequencesIndexOffset_; + return *this; + } + + operator VkCmdProcessCommandsInfoNVX const&() const + { + return *reinterpret_cast(this); + } + + operator VkCmdProcessCommandsInfoNVX &() + { + return *reinterpret_cast(this); + } + + bool operator==( CmdProcessCommandsInfoNVX const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( objectTable == rhs.objectTable ) + && ( indirectCommandsLayout == rhs.indirectCommandsLayout ) + && ( indirectCommandsTokenCount == rhs.indirectCommandsTokenCount ) + && ( pIndirectCommandsTokens == rhs.pIndirectCommandsTokens ) + && ( maxSequencesCount == rhs.maxSequencesCount ) + && ( targetCommandBuffer == rhs.targetCommandBuffer ) + && ( sequencesCountBuffer == rhs.sequencesCountBuffer ) + && ( sequencesCountOffset == rhs.sequencesCountOffset ) + && ( sequencesIndexBuffer == rhs.sequencesIndexBuffer ) + && ( sequencesIndexOffset == rhs.sequencesIndexOffset ); + } + + bool operator!=( CmdProcessCommandsInfoNVX const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eCmdProcessCommandsInfoNVX; + + public: + const void* pNext = nullptr; + ObjectTableNVX objectTable; + IndirectCommandsLayoutNVX indirectCommandsLayout; + uint32_t indirectCommandsTokenCount; + const IndirectCommandsTokenNVX* pIndirectCommandsTokens; + uint32_t maxSequencesCount; + CommandBuffer targetCommandBuffer; + Buffer sequencesCountBuffer; + DeviceSize sequencesCountOffset; + Buffer sequencesIndexBuffer; + DeviceSize sequencesIndexOffset; + }; + static_assert( sizeof( CmdProcessCommandsInfoNVX ) == sizeof( VkCmdProcessCommandsInfoNVX ), "struct and wrapper have different size!" 
); + + struct PhysicalDeviceGroupProperties + { + operator VkPhysicalDeviceGroupProperties const&() const + { + return *reinterpret_cast(this); + } + + operator VkPhysicalDeviceGroupProperties &() + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDeviceGroupProperties const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( physicalDeviceCount == rhs.physicalDeviceCount ) + && ( memcmp( physicalDevices, rhs.physicalDevices, VK_MAX_DEVICE_GROUP_SIZE * sizeof( PhysicalDevice ) ) == 0 ) + && ( subsetAllocation == rhs.subsetAllocation ); + } + + bool operator!=( PhysicalDeviceGroupProperties const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePhysicalDeviceGroupProperties; + + public: + void* pNext = nullptr; + uint32_t physicalDeviceCount; + PhysicalDevice physicalDevices[VK_MAX_DEVICE_GROUP_SIZE]; + Bool32 subsetAllocation; + }; + static_assert( sizeof( PhysicalDeviceGroupProperties ) == sizeof( VkPhysicalDeviceGroupProperties ), "struct and wrapper have different size!" ); + + using PhysicalDeviceGroupPropertiesKHR = PhysicalDeviceGroupProperties; + +#ifndef VULKAN_HPP_NO_SMART_HANDLE + class Instance; + + template class UniqueHandleTraits {public: using deleter = ObjectDestroy; }; + using UniqueDebugReportCallbackEXT = UniqueHandle; + template class UniqueHandleTraits {public: using deleter = ObjectDestroy; }; + using UniqueDebugUtilsMessengerEXT = UniqueHandle; + template class UniqueHandleTraits {public: using deleter = ObjectDestroy; }; + using UniqueSurfaceKHR = UniqueHandle; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ + + class Instance + { + public: + VULKAN_HPP_CONSTEXPR Instance() + : m_instance(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR Instance( std::nullptr_t ) + : m_instance(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT Instance( VkInstance instance ) + : m_instance( instance ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + Instance & operator=(VkInstance instance) + { + m_instance = instance; + return *this; + } +#endif + + Instance & operator=( std::nullptr_t ) + { + m_instance = VK_NULL_HANDLE; + return *this; + } + + bool operator==( Instance const & rhs ) const + { + return m_instance == rhs.m_instance; + } + + bool operator!=(Instance const & rhs ) const + { + return m_instance != rhs.m_instance; + } + + bool operator<(Instance const & rhs ) const + { + return m_instance < rhs.m_instance; + } + + template + void destroy( const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result enumeratePhysicalDevices( uint32_t* pPhysicalDeviceCount, PhysicalDevice* pPhysicalDevices, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = DispatchLoaderStatic> + typename ResultValueType>::type enumeratePhysicalDevices(Dispatch const &d = Dispatch() ) const; + template , typename Dispatch = DispatchLoaderStatic> + typename ResultValueType>::type enumeratePhysicalDevices(Allocator const& vectorAllocator, Dispatch const &d ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + PFN_vkVoidFunction getProcAddr( const char* pName, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + PFN_vkVoidFunction getProcAddr( const std::string & 
name, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VK_USE_PLATFORM_ANDROID_KHR + template + Result createAndroidSurfaceKHR( const AndroidSurfaceCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type createAndroidSurfaceKHR( const AndroidSurfaceCreateInfoKHR & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + typename ResultValueType>::type createAndroidSurfaceKHRUnique( const AndroidSurfaceCreateInfoKHR & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + + template + Result createDisplayPlaneSurfaceKHR( const DisplaySurfaceCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type createDisplayPlaneSurfaceKHR( const DisplaySurfaceCreateInfoKHR & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + typename ResultValueType>::type createDisplayPlaneSurfaceKHRUnique( const DisplaySurfaceCreateInfoKHR & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroySurfaceKHR( SurfaceKHR surface, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroySurfaceKHR( SurfaceKHR surface, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( SurfaceKHR surface, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( SurfaceKHR surface, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VK_USE_PLATFORM_VI_NN + template + Result createViSurfaceNN( const ViSurfaceCreateInfoNN* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type createViSurfaceNN( const ViSurfaceCreateInfoNN & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + typename ResultValueType>::type createViSurfaceNNUnique( const ViSurfaceCreateInfoNN & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_VI_NN*/ + +#ifdef VK_USE_PLATFORM_WAYLAND_KHR + template + Result createWaylandSurfaceKHR( const WaylandSurfaceCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type createWaylandSurfaceKHR( const WaylandSurfaceCreateInfoKHR & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#ifndef 
VULKAN_HPP_NO_SMART_HANDLE + template + typename ResultValueType>::type createWaylandSurfaceKHRUnique( const WaylandSurfaceCreateInfoKHR & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + +#ifdef VK_USE_PLATFORM_WIN32_KHR + template + Result createWin32SurfaceKHR( const Win32SurfaceCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type createWin32SurfaceKHR( const Win32SurfaceCreateInfoKHR & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + typename ResultValueType>::type createWin32SurfaceKHRUnique( const Win32SurfaceCreateInfoKHR & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#ifdef VK_USE_PLATFORM_XLIB_KHR + template + Result createXlibSurfaceKHR( const XlibSurfaceCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type createXlibSurfaceKHR( const XlibSurfaceCreateInfoKHR & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + typename ResultValueType>::type createXlibSurfaceKHRUnique( const XlibSurfaceCreateInfoKHR & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ + +#ifdef VK_USE_PLATFORM_XCB_KHR + template + Result createXcbSurfaceKHR( const XcbSurfaceCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type createXcbSurfaceKHR( const XcbSurfaceCreateInfoKHR & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + typename ResultValueType>::type createXcbSurfaceKHRUnique( const XcbSurfaceCreateInfoKHR & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_XCB_KHR*/ + +#ifdef VK_USE_PLATFORM_FUCHSIA_FUCHSIA + template + Result createImagePipeSurfaceFUCHSIA( const ImagePipeSurfaceCreateInfoFUCHSIA* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type createImagePipeSurfaceFUCHSIA( const ImagePipeSurfaceCreateInfoFUCHSIA & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + typename ResultValueType>::type createImagePipeSurfaceFUCHSIAUnique( const ImagePipeSurfaceCreateInfoFUCHSIA & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_FUCHSIA_FUCHSIA*/ + + template 
+ Result createDebugReportCallbackEXT( const DebugReportCallbackCreateInfoEXT* pCreateInfo, const AllocationCallbacks* pAllocator, DebugReportCallbackEXT* pCallback, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type createDebugReportCallbackEXT( const DebugReportCallbackCreateInfoEXT & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + typename ResultValueType>::type createDebugReportCallbackEXTUnique( const DebugReportCallbackCreateInfoEXT & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyDebugReportCallbackEXT( DebugReportCallbackEXT callback, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyDebugReportCallbackEXT( DebugReportCallbackEXT callback, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( DebugReportCallbackEXT callback, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( DebugReportCallbackEXT callback, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void debugReportMessageEXT( DebugReportFlagsEXT flags, DebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char* pLayerPrefix, const char* pMessage, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void debugReportMessageEXT( DebugReportFlagsEXT flags, DebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const std::string & layerPrefix, const std::string & message, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result enumeratePhysicalDeviceGroups( uint32_t* pPhysicalDeviceGroupCount, PhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = DispatchLoaderStatic> + typename ResultValueType>::type enumeratePhysicalDeviceGroups(Dispatch const &d = Dispatch() ) const; + template , typename Dispatch = DispatchLoaderStatic> + typename ResultValueType>::type enumeratePhysicalDeviceGroups(Allocator const& vectorAllocator, Dispatch const &d ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result enumeratePhysicalDeviceGroupsKHR( uint32_t* pPhysicalDeviceGroupCount, PhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = DispatchLoaderStatic> + typename ResultValueType>::type enumeratePhysicalDeviceGroupsKHR(Dispatch const &d = Dispatch() ) const; + template , typename Dispatch = DispatchLoaderStatic> + typename ResultValueType>::type enumeratePhysicalDeviceGroupsKHR(Allocator const& vectorAllocator, Dispatch const &d ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VK_USE_PLATFORM_IOS_MVK + template + Result createIOSSurfaceMVK( const IOSSurfaceCreateInfoMVK* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface, Dispatch 
const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type createIOSSurfaceMVK( const IOSSurfaceCreateInfoMVK & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + typename ResultValueType>::type createIOSSurfaceMVKUnique( const IOSSurfaceCreateInfoMVK & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_IOS_MVK*/ + +#ifdef VK_USE_PLATFORM_MACOS_MVK + template + Result createMacOSSurfaceMVK( const MacOSSurfaceCreateInfoMVK* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type createMacOSSurfaceMVK( const MacOSSurfaceCreateInfoMVK & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + typename ResultValueType>::type createMacOSSurfaceMVKUnique( const MacOSSurfaceCreateInfoMVK & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ + + template + Result createDebugUtilsMessengerEXT( const DebugUtilsMessengerCreateInfoEXT* pCreateInfo, const AllocationCallbacks* pAllocator, DebugUtilsMessengerEXT* pMessenger, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type createDebugUtilsMessengerEXT( const DebugUtilsMessengerCreateInfoEXT & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + typename ResultValueType>::type createDebugUtilsMessengerEXTUnique( const DebugUtilsMessengerCreateInfoEXT & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyDebugUtilsMessengerEXT( DebugUtilsMessengerEXT messenger, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyDebugUtilsMessengerEXT( DebugUtilsMessengerEXT messenger, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( DebugUtilsMessengerEXT messenger, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( DebugUtilsMessengerEXT messenger, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void submitDebugUtilsMessageEXT( DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, DebugUtilsMessageTypeFlagsEXT messageTypes, const DebugUtilsMessengerCallbackDataEXT* pCallbackData, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void submitDebugUtilsMessageEXT( DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, DebugUtilsMessageTypeFlagsEXT messageTypes, const DebugUtilsMessengerCallbackDataEXT & callbackData, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkInstance() const + { + 
return m_instance; + } + + explicit operator bool() const + { + return m_instance != VK_NULL_HANDLE; + } + + bool operator!() const + { + return m_instance == VK_NULL_HANDLE; + } + + private: + VkInstance m_instance; + }; + + static_assert( sizeof( Instance ) == sizeof( VkInstance ), "handle and wrapper have different size!" ); + + template + VULKAN_HPP_INLINE void Instance::destroy( const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroyInstance( m_instance, reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Instance::destroy( Optional allocator, Dispatch const &d ) const + { + d.vkDestroyInstance( m_instance, reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Instance::enumeratePhysicalDevices( uint32_t* pPhysicalDeviceCount, PhysicalDevice* pPhysicalDevices, Dispatch const &d) const + { + return static_cast( d.vkEnumeratePhysicalDevices( m_instance, pPhysicalDeviceCount, reinterpret_cast( pPhysicalDevices ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType>::type Instance::enumeratePhysicalDevices(Dispatch const &d ) const + { + std::vector physicalDevices; + uint32_t physicalDeviceCount; + Result result; + do + { + result = static_cast( d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && physicalDeviceCount ) + { + physicalDevices.resize( physicalDeviceCount ); + result = static_cast( d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, reinterpret_cast( physicalDevices.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + VULKAN_HPP_ASSERT( physicalDeviceCount <= physicalDevices.size() ); + physicalDevices.resize( physicalDeviceCount ); + return createResultValue( result, physicalDevices, VULKAN_HPP_NAMESPACE_STRING"::Instance::enumeratePhysicalDevices" ); + } + template + VULKAN_HPP_INLINE typename ResultValueType>::type Instance::enumeratePhysicalDevices(Allocator const& vectorAllocator, Dispatch const &d ) const + { + std::vector physicalDevices( vectorAllocator ); + uint32_t physicalDeviceCount; + Result result; + do + { + result = static_cast( d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && physicalDeviceCount ) + { + physicalDevices.resize( physicalDeviceCount ); + result = static_cast( d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, reinterpret_cast( physicalDevices.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + VULKAN_HPP_ASSERT( physicalDeviceCount <= physicalDevices.size() ); + physicalDevices.resize( physicalDeviceCount ); + return createResultValue( result, physicalDevices, VULKAN_HPP_NAMESPACE_STRING"::Instance::enumeratePhysicalDevices" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE PFN_vkVoidFunction Instance::getProcAddr( const char* pName, Dispatch const &d) const + { + return d.vkGetInstanceProcAddr( m_instance, pName ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE PFN_vkVoidFunction Instance::getProcAddr( const std::string & name, Dispatch const &d ) const + { + return d.vkGetInstanceProcAddr( m_instance, name.c_str() ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VK_USE_PLATFORM_ANDROID_KHR + template + VULKAN_HPP_INLINE Result Instance::createAndroidSurfaceKHR( 
const AndroidSurfaceCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface, Dispatch const &d) const + { + return static_cast( d.vkCreateAndroidSurfaceKHR( m_instance, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSurface ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Instance::createAndroidSurfaceKHR( const AndroidSurfaceCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const + { + SurfaceKHR surface; + Result result = static_cast( d.vkCreateAndroidSurfaceKHR( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); + return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createAndroidSurfaceKHR" ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type Instance::createAndroidSurfaceKHRUnique( const AndroidSurfaceCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const + { + SurfaceKHR surface; + Result result = static_cast( d.vkCreateAndroidSurfaceKHR( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); + + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createAndroidSurfaceKHRUnique", deleter ); + } +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + + template + VULKAN_HPP_INLINE Result Instance::createDisplayPlaneSurfaceKHR( const DisplaySurfaceCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface, Dispatch const &d) const + { + return static_cast( d.vkCreateDisplayPlaneSurfaceKHR( m_instance, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSurface ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Instance::createDisplayPlaneSurfaceKHR( const DisplaySurfaceCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const + { + SurfaceKHR surface; + Result result = static_cast( d.vkCreateDisplayPlaneSurfaceKHR( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); + return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createDisplayPlaneSurfaceKHR" ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type Instance::createDisplayPlaneSurfaceKHRUnique( const DisplaySurfaceCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const + { + SurfaceKHR surface; + Result result = static_cast( d.vkCreateDisplayPlaneSurfaceKHR( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); + + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createDisplayPlaneSurfaceKHRUnique", deleter ); + } +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Instance::destroySurfaceKHR( SurfaceKHR surface, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroySurfaceKHR( m_instance, static_cast( surface ), reinterpret_cast( pAllocator ) ); + } +#ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Instance::destroySurfaceKHR( SurfaceKHR surface, Optional allocator, Dispatch const &d ) const + { + d.vkDestroySurfaceKHR( m_instance, static_cast( surface ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Instance::destroy( SurfaceKHR surface, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroySurfaceKHR( m_instance, static_cast( surface ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Instance::destroy( SurfaceKHR surface, Optional allocator, Dispatch const &d ) const + { + d.vkDestroySurfaceKHR( m_instance, static_cast( surface ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VK_USE_PLATFORM_VI_NN + template + VULKAN_HPP_INLINE Result Instance::createViSurfaceNN( const ViSurfaceCreateInfoNN* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface, Dispatch const &d) const + { + return static_cast( d.vkCreateViSurfaceNN( m_instance, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSurface ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Instance::createViSurfaceNN( const ViSurfaceCreateInfoNN & createInfo, Optional allocator, Dispatch const &d ) const + { + SurfaceKHR surface; + Result result = static_cast( d.vkCreateViSurfaceNN( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); + return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createViSurfaceNN" ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type Instance::createViSurfaceNNUnique( const ViSurfaceCreateInfoNN & createInfo, Optional allocator, Dispatch const &d ) const + { + SurfaceKHR surface; + Result result = static_cast( d.vkCreateViSurfaceNN( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); + + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createViSurfaceNNUnique", deleter ); + } +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_VI_NN*/ + +#ifdef VK_USE_PLATFORM_WAYLAND_KHR + template + VULKAN_HPP_INLINE Result Instance::createWaylandSurfaceKHR( const WaylandSurfaceCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface, Dispatch const &d) const + { + return static_cast( d.vkCreateWaylandSurfaceKHR( m_instance, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSurface ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Instance::createWaylandSurfaceKHR( const WaylandSurfaceCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const + { + SurfaceKHR surface; + Result result = static_cast( d.vkCreateWaylandSurfaceKHR( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); + return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createWaylandSurfaceKHR" ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + 
VULKAN_HPP_INLINE typename ResultValueType>::type Instance::createWaylandSurfaceKHRUnique( const WaylandSurfaceCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const + { + SurfaceKHR surface; + Result result = static_cast( d.vkCreateWaylandSurfaceKHR( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); + + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createWaylandSurfaceKHRUnique", deleter ); + } +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + +#ifdef VK_USE_PLATFORM_WIN32_KHR + template + VULKAN_HPP_INLINE Result Instance::createWin32SurfaceKHR( const Win32SurfaceCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface, Dispatch const &d) const + { + return static_cast( d.vkCreateWin32SurfaceKHR( m_instance, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSurface ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Instance::createWin32SurfaceKHR( const Win32SurfaceCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const + { + SurfaceKHR surface; + Result result = static_cast( d.vkCreateWin32SurfaceKHR( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); + return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createWin32SurfaceKHR" ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type Instance::createWin32SurfaceKHRUnique( const Win32SurfaceCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const + { + SurfaceKHR surface; + Result result = static_cast( d.vkCreateWin32SurfaceKHR( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); + + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createWin32SurfaceKHRUnique", deleter ); + } +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#ifdef VK_USE_PLATFORM_XLIB_KHR + template + VULKAN_HPP_INLINE Result Instance::createXlibSurfaceKHR( const XlibSurfaceCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface, Dispatch const &d) const + { + return static_cast( d.vkCreateXlibSurfaceKHR( m_instance, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSurface ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Instance::createXlibSurfaceKHR( const XlibSurfaceCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const + { + SurfaceKHR surface; + Result result = static_cast( d.vkCreateXlibSurfaceKHR( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); + return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createXlibSurfaceKHR" ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type Instance::createXlibSurfaceKHRUnique( const XlibSurfaceCreateInfoKHR & createInfo, Optional allocator, Dispatch const 
&d ) const + { + SurfaceKHR surface; + Result result = static_cast( d.vkCreateXlibSurfaceKHR( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); + + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createXlibSurfaceKHRUnique", deleter ); + } +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ + +#ifdef VK_USE_PLATFORM_XCB_KHR + template + VULKAN_HPP_INLINE Result Instance::createXcbSurfaceKHR( const XcbSurfaceCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface, Dispatch const &d) const + { + return static_cast( d.vkCreateXcbSurfaceKHR( m_instance, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSurface ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Instance::createXcbSurfaceKHR( const XcbSurfaceCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const + { + SurfaceKHR surface; + Result result = static_cast( d.vkCreateXcbSurfaceKHR( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); + return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createXcbSurfaceKHR" ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type Instance::createXcbSurfaceKHRUnique( const XcbSurfaceCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const + { + SurfaceKHR surface; + Result result = static_cast( d.vkCreateXcbSurfaceKHR( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); + + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createXcbSurfaceKHRUnique", deleter ); + } +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_XCB_KHR*/ + +#ifdef VK_USE_PLATFORM_FUCHSIA_FUCHSIA + template + VULKAN_HPP_INLINE Result Instance::createImagePipeSurfaceFUCHSIA( const ImagePipeSurfaceCreateInfoFUCHSIA* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface, Dispatch const &d) const + { + return static_cast( d.vkCreateImagePipeSurfaceFUCHSIA( m_instance, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSurface ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Instance::createImagePipeSurfaceFUCHSIA( const ImagePipeSurfaceCreateInfoFUCHSIA & createInfo, Optional allocator, Dispatch const &d ) const + { + SurfaceKHR surface; + Result result = static_cast( d.vkCreateImagePipeSurfaceFUCHSIA( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); + return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createImagePipeSurfaceFUCHSIA" ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type Instance::createImagePipeSurfaceFUCHSIAUnique( const ImagePipeSurfaceCreateInfoFUCHSIA & createInfo, Optional allocator, Dispatch const &d ) const + { + SurfaceKHR surface; + Result result = static_cast( d.vkCreateImagePipeSurfaceFUCHSIA( m_instance, 
reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); + + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createImagePipeSurfaceFUCHSIAUnique", deleter ); + } +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_FUCHSIA_FUCHSIA*/ + + template + VULKAN_HPP_INLINE Result Instance::createDebugReportCallbackEXT( const DebugReportCallbackCreateInfoEXT* pCreateInfo, const AllocationCallbacks* pAllocator, DebugReportCallbackEXT* pCallback, Dispatch const &d) const + { + return static_cast( d.vkCreateDebugReportCallbackEXT( m_instance, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pCallback ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Instance::createDebugReportCallbackEXT( const DebugReportCallbackCreateInfoEXT & createInfo, Optional allocator, Dispatch const &d ) const + { + DebugReportCallbackEXT callback; + Result result = static_cast( d.vkCreateDebugReportCallbackEXT( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &callback ) ) ); + return createResultValue( result, callback, VULKAN_HPP_NAMESPACE_STRING"::Instance::createDebugReportCallbackEXT" ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type Instance::createDebugReportCallbackEXTUnique( const DebugReportCallbackCreateInfoEXT & createInfo, Optional allocator, Dispatch const &d ) const + { + DebugReportCallbackEXT callback; + Result result = static_cast( d.vkCreateDebugReportCallbackEXT( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &callback ) ) ); + + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( result, callback, VULKAN_HPP_NAMESPACE_STRING"::Instance::createDebugReportCallbackEXTUnique", deleter ); + } +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Instance::destroyDebugReportCallbackEXT( DebugReportCallbackEXT callback, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroyDebugReportCallbackEXT( m_instance, static_cast( callback ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Instance::destroyDebugReportCallbackEXT( DebugReportCallbackEXT callback, Optional allocator, Dispatch const &d ) const + { + d.vkDestroyDebugReportCallbackEXT( m_instance, static_cast( callback ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Instance::destroy( DebugReportCallbackEXT callback, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroyDebugReportCallbackEXT( m_instance, static_cast( callback ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Instance::destroy( DebugReportCallbackEXT callback, Optional allocator, Dispatch const &d ) const + { + d.vkDestroyDebugReportCallbackEXT( m_instance, static_cast( callback ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Instance::debugReportMessageEXT( DebugReportFlagsEXT flags, 
DebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char* pLayerPrefix, const char* pMessage, Dispatch const &d) const + { + d.vkDebugReportMessageEXT( m_instance, static_cast( flags ), static_cast( objectType ), object, location, messageCode, pLayerPrefix, pMessage ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Instance::debugReportMessageEXT( DebugReportFlagsEXT flags, DebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const std::string & layerPrefix, const std::string & message, Dispatch const &d ) const + { +#ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( layerPrefix.size() == message.size() ); +#else + if ( layerPrefix.size() != message.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Instance::debugReportMessageEXT: layerPrefix.size() != message.size()" ); + } +#endif // VULKAN_HPP_NO_EXCEPTIONS + d.vkDebugReportMessageEXT( m_instance, static_cast( flags ), static_cast( objectType ), object, location, messageCode, layerPrefix.c_str(), message.c_str() ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Instance::enumeratePhysicalDeviceGroups( uint32_t* pPhysicalDeviceGroupCount, PhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties, Dispatch const &d) const + { + return static_cast( d.vkEnumeratePhysicalDeviceGroups( m_instance, pPhysicalDeviceGroupCount, reinterpret_cast( pPhysicalDeviceGroupProperties ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType>::type Instance::enumeratePhysicalDeviceGroups(Dispatch const &d ) const + { + std::vector physicalDeviceGroupProperties; + uint32_t physicalDeviceGroupCount; + Result result; + do + { + result = static_cast( d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && physicalDeviceGroupCount ) + { + physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); + result = static_cast( d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, reinterpret_cast( physicalDeviceGroupProperties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() ); + physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); + return createResultValue( result, physicalDeviceGroupProperties, VULKAN_HPP_NAMESPACE_STRING"::Instance::enumeratePhysicalDeviceGroups" ); + } + template + VULKAN_HPP_INLINE typename ResultValueType>::type Instance::enumeratePhysicalDeviceGroups(Allocator const& vectorAllocator, Dispatch const &d ) const + { + std::vector physicalDeviceGroupProperties( vectorAllocator ); + uint32_t physicalDeviceGroupCount; + Result result; + do + { + result = static_cast( d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && physicalDeviceGroupCount ) + { + physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); + result = static_cast( d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, reinterpret_cast( physicalDeviceGroupProperties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() ); + physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); + return createResultValue( result, 
physicalDeviceGroupProperties, VULKAN_HPP_NAMESPACE_STRING"::Instance::enumeratePhysicalDeviceGroups" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Instance::enumeratePhysicalDeviceGroupsKHR( uint32_t* pPhysicalDeviceGroupCount, PhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties, Dispatch const &d) const + { + return static_cast( d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, pPhysicalDeviceGroupCount, reinterpret_cast( pPhysicalDeviceGroupProperties ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType>::type Instance::enumeratePhysicalDeviceGroupsKHR(Dispatch const &d ) const + { + std::vector physicalDeviceGroupProperties; + uint32_t physicalDeviceGroupCount; + Result result; + do + { + result = static_cast( d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, &physicalDeviceGroupCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && physicalDeviceGroupCount ) + { + physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); + result = static_cast( d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, &physicalDeviceGroupCount, reinterpret_cast( physicalDeviceGroupProperties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() ); + physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); + return createResultValue( result, physicalDeviceGroupProperties, VULKAN_HPP_NAMESPACE_STRING"::Instance::enumeratePhysicalDeviceGroupsKHR" ); + } + template + VULKAN_HPP_INLINE typename ResultValueType>::type Instance::enumeratePhysicalDeviceGroupsKHR(Allocator const& vectorAllocator, Dispatch const &d ) const + { + std::vector physicalDeviceGroupProperties( vectorAllocator ); + uint32_t physicalDeviceGroupCount; + Result result; + do + { + result = static_cast( d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, &physicalDeviceGroupCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && physicalDeviceGroupCount ) + { + physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); + result = static_cast( d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, &physicalDeviceGroupCount, reinterpret_cast( physicalDeviceGroupProperties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() ); + physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); + return createResultValue( result, physicalDeviceGroupProperties, VULKAN_HPP_NAMESPACE_STRING"::Instance::enumeratePhysicalDeviceGroupsKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VK_USE_PLATFORM_IOS_MVK + template + VULKAN_HPP_INLINE Result Instance::createIOSSurfaceMVK( const IOSSurfaceCreateInfoMVK* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface, Dispatch const &d) const + { + return static_cast( d.vkCreateIOSSurfaceMVK( m_instance, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSurface ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Instance::createIOSSurfaceMVK( const IOSSurfaceCreateInfoMVK & createInfo, Optional allocator, Dispatch const &d ) const + { + SurfaceKHR surface; + Result result = static_cast( d.vkCreateIOSSurfaceMVK( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); + return createResultValue( result, 
surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createIOSSurfaceMVK" ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type Instance::createIOSSurfaceMVKUnique( const IOSSurfaceCreateInfoMVK & createInfo, Optional allocator, Dispatch const &d ) const + { + SurfaceKHR surface; + Result result = static_cast( d.vkCreateIOSSurfaceMVK( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); + + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createIOSSurfaceMVKUnique", deleter ); + } +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_IOS_MVK*/ + +#ifdef VK_USE_PLATFORM_MACOS_MVK + template + VULKAN_HPP_INLINE Result Instance::createMacOSSurfaceMVK( const MacOSSurfaceCreateInfoMVK* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface, Dispatch const &d) const + { + return static_cast( d.vkCreateMacOSSurfaceMVK( m_instance, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSurface ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Instance::createMacOSSurfaceMVK( const MacOSSurfaceCreateInfoMVK & createInfo, Optional allocator, Dispatch const &d ) const + { + SurfaceKHR surface; + Result result = static_cast( d.vkCreateMacOSSurfaceMVK( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); + return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createMacOSSurfaceMVK" ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type Instance::createMacOSSurfaceMVKUnique( const MacOSSurfaceCreateInfoMVK & createInfo, Optional allocator, Dispatch const &d ) const + { + SurfaceKHR surface; + Result result = static_cast( d.vkCreateMacOSSurfaceMVK( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); + + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createMacOSSurfaceMVKUnique", deleter ); + } +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ + + template + VULKAN_HPP_INLINE Result Instance::createDebugUtilsMessengerEXT( const DebugUtilsMessengerCreateInfoEXT* pCreateInfo, const AllocationCallbacks* pAllocator, DebugUtilsMessengerEXT* pMessenger, Dispatch const &d) const + { + return static_cast( d.vkCreateDebugUtilsMessengerEXT( m_instance, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pMessenger ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Instance::createDebugUtilsMessengerEXT( const DebugUtilsMessengerCreateInfoEXT & createInfo, Optional allocator, Dispatch const &d ) const + { + DebugUtilsMessengerEXT messenger; + Result result = static_cast( d.vkCreateDebugUtilsMessengerEXT( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &messenger ) ) ); + return createResultValue( result, messenger, VULKAN_HPP_NAMESPACE_STRING"::Instance::createDebugUtilsMessengerEXT" ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + 
VULKAN_HPP_INLINE typename ResultValueType>::type Instance::createDebugUtilsMessengerEXTUnique( const DebugUtilsMessengerCreateInfoEXT & createInfo, Optional allocator, Dispatch const &d ) const + { + DebugUtilsMessengerEXT messenger; + Result result = static_cast( d.vkCreateDebugUtilsMessengerEXT( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &messenger ) ) ); + + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( result, messenger, VULKAN_HPP_NAMESPACE_STRING"::Instance::createDebugUtilsMessengerEXTUnique", deleter ); + } +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Instance::destroyDebugUtilsMessengerEXT( DebugUtilsMessengerEXT messenger, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroyDebugUtilsMessengerEXT( m_instance, static_cast( messenger ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Instance::destroyDebugUtilsMessengerEXT( DebugUtilsMessengerEXT messenger, Optional allocator, Dispatch const &d ) const + { + d.vkDestroyDebugUtilsMessengerEXT( m_instance, static_cast( messenger ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Instance::destroy( DebugUtilsMessengerEXT messenger, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroyDebugUtilsMessengerEXT( m_instance, static_cast( messenger ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Instance::destroy( DebugUtilsMessengerEXT messenger, Optional allocator, Dispatch const &d ) const + { + d.vkDestroyDebugUtilsMessengerEXT( m_instance, static_cast( messenger ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Instance::submitDebugUtilsMessageEXT( DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, DebugUtilsMessageTypeFlagsEXT messageTypes, const DebugUtilsMessengerCallbackDataEXT* pCallbackData, Dispatch const &d) const + { + d.vkSubmitDebugUtilsMessageEXT( m_instance, static_cast( messageSeverity ), static_cast( messageTypes ), reinterpret_cast( pCallbackData ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Instance::submitDebugUtilsMessageEXT( DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, DebugUtilsMessageTypeFlagsEXT messageTypes, const DebugUtilsMessengerCallbackDataEXT & callbackData, Dispatch const &d ) const + { + d.vkSubmitDebugUtilsMessageEXT( m_instance, static_cast( messageSeverity ), static_cast( messageTypes ), reinterpret_cast( &callbackData ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + struct DeviceGroupDeviceCreateInfo + { + DeviceGroupDeviceCreateInfo( uint32_t physicalDeviceCount_ = 0, + const PhysicalDevice* pPhysicalDevices_ = nullptr ) + : physicalDeviceCount( physicalDeviceCount_ ) + , pPhysicalDevices( pPhysicalDevices_ ) + { + } + + DeviceGroupDeviceCreateInfo( VkDeviceGroupDeviceCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( DeviceGroupDeviceCreateInfo ) ); + } + + DeviceGroupDeviceCreateInfo& operator=( VkDeviceGroupDeviceCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( DeviceGroupDeviceCreateInfo ) ); + return *this; + } + DeviceGroupDeviceCreateInfo& setPNext( const void* pNext_ ) + { + pNext 
= pNext_; + return *this; + } + + DeviceGroupDeviceCreateInfo& setPhysicalDeviceCount( uint32_t physicalDeviceCount_ ) + { + physicalDeviceCount = physicalDeviceCount_; + return *this; + } + + DeviceGroupDeviceCreateInfo& setPPhysicalDevices( const PhysicalDevice* pPhysicalDevices_ ) + { + pPhysicalDevices = pPhysicalDevices_; + return *this; + } + + operator VkDeviceGroupDeviceCreateInfo const&() const + { + return *reinterpret_cast(this); + } + + operator VkDeviceGroupDeviceCreateInfo &() + { + return *reinterpret_cast(this); + } + + bool operator==( DeviceGroupDeviceCreateInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( physicalDeviceCount == rhs.physicalDeviceCount ) + && ( pPhysicalDevices == rhs.pPhysicalDevices ); + } + + bool operator!=( DeviceGroupDeviceCreateInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eDeviceGroupDeviceCreateInfo; + + public: + const void* pNext = nullptr; + uint32_t physicalDeviceCount; + const PhysicalDevice* pPhysicalDevices; + }; + static_assert( sizeof( DeviceGroupDeviceCreateInfo ) == sizeof( VkDeviceGroupDeviceCreateInfo ), "struct and wrapper have different size!" ); + + using DeviceGroupDeviceCreateInfoKHR = DeviceGroupDeviceCreateInfo; + +#ifndef VULKAN_HPP_NO_SMART_HANDLE + + template class UniqueHandleTraits {public: using deleter = ObjectDestroy; }; + using UniqueInstance = UniqueHandle; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ + + template + Result createInstance( const InstanceCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Instance* pInstance, Dispatch const &d = Dispatch() ); +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type createInstance( const InstanceCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ); +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + typename ResultValueType>::type createInstanceUnique( const InstanceCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ); +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result createInstance( const InstanceCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Instance* pInstance, Dispatch const &d) + { + return static_cast( d.vkCreateInstance( reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pInstance ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type createInstance( const InstanceCreateInfo & createInfo, Optional allocator, Dispatch const &d ) + { + Instance instance; + Result result = static_cast( d.vkCreateInstance( reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &instance ) ) ); + return createResultValue( result, instance, VULKAN_HPP_NAMESPACE_STRING"::createInstance" ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type createInstanceUnique( const InstanceCreateInfo & createInfo, Optional allocator, Dispatch const &d ) + { + Instance instance; + Result result = static_cast( d.vkCreateInstance( reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &instance ) ) ); + + ObjectDestroy deleter( allocator, d ); + return createResultValue( result, instance, VULKAN_HPP_NAMESPACE_STRING"::createInstanceUnique", deleter ); + } +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ 
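+  // Usage sketch (illustrative only, not part of the generated wrappers above):
+  // a typical call into the enhanced-mode createInstanceUnique helper. The
+  // application/engine names and versions below are placeholder values, and the
+  // sketch assumes the default configuration with exceptions and smart handles
+  // enabled, where failures throw vk::SystemError instead of returning a Result.
+#if 0
+  static UniqueInstance exampleCreateInstanceUnique()
+  {
+    ApplicationInfo appInfo( "placeholder app", 1, "placeholder engine", 1, VK_API_VERSION_1_0 );
+    InstanceCreateInfo createInfo( InstanceCreateFlags(), &appInfo );
+    // The returned UniqueInstance destroys the underlying VkInstance when it
+    // goes out of scope.
+    return createInstanceUnique( createInfo );
+  }
+#endif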
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + + struct BaseOutStructure + { + BaseOutStructure( ) + { + } + + BaseOutStructure( VkBaseOutStructure const & rhs ) + { + memcpy( this, &rhs, sizeof( BaseOutStructure ) ); + } + + BaseOutStructure& operator=( VkBaseOutStructure const & rhs ) + { + memcpy( this, &rhs, sizeof( BaseOutStructure ) ); + return *this; + } + BaseOutStructure& setPNext( struct BaseOutStructure* pNext_ ) + { + pNext = pNext_; + return *this; + } + + operator VkBaseOutStructure const&() const + { + return *reinterpret_cast(this); + } + + operator VkBaseOutStructure &() + { + return *reinterpret_cast(this); + } + + bool operator==( BaseOutStructure const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ); + } + + bool operator!=( BaseOutStructure const& rhs ) const + { + return !operator==( rhs ); + } + + StructureType sType; + struct BaseOutStructure* pNext = nullptr; + }; + static_assert( sizeof( BaseOutStructure ) == sizeof( VkBaseOutStructure ), "struct and wrapper have different size!" ); + + struct BaseInStructure + { + BaseInStructure( ) + { + } + + BaseInStructure( VkBaseInStructure const & rhs ) + { + memcpy( this, &rhs, sizeof( BaseInStructure ) ); + } + + BaseInStructure& operator=( VkBaseInStructure const & rhs ) + { + memcpy( this, &rhs, sizeof( BaseInStructure ) ); + return *this; + } + BaseInStructure& setPNext( const struct BaseInStructure* pNext_ ) + { + pNext = pNext_; + return *this; + } + + operator VkBaseInStructure const&() const + { + return *reinterpret_cast(this); + } + + operator VkBaseInStructure &() + { + return *reinterpret_cast(this); + } + + bool operator==( BaseInStructure const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ); + } + + bool operator!=( BaseInStructure const& rhs ) const + { + return !operator==( rhs ); + } + + StructureType sType; + const struct BaseInStructure* pNext = nullptr; + }; + static_assert( sizeof( BaseInStructure ) == sizeof( VkBaseInStructure ), "struct and wrapper have different size!" 
); + + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; +#ifdef VK_USE_PLATFORM_WIN32_NV + template <> struct isStructureChainValid{ enum { value = true }; }; +#endif /*VK_USE_PLATFORM_WIN32_NV*/ +#ifdef VK_USE_PLATFORM_WIN32_NV + template <> struct isStructureChainValid{ enum { value = true }; }; +#endif /*VK_USE_PLATFORM_WIN32_NV*/ + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; +#ifdef VK_USE_PLATFORM_WIN32_KHR + template <> struct isStructureChainValid{ enum { value = true }; }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +#ifdef VK_USE_PLATFORM_WIN32_KHR + template <> struct isStructureChainValid{ enum { value = true }; }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +#ifdef VK_USE_PLATFORM_WIN32_KHR + template <> struct isStructureChainValid{ enum { value = true }; }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +#ifdef VK_USE_PLATFORM_WIN32_KHR + template <> struct isStructureChainValid{ enum { value = true }; }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +#ifdef VK_USE_PLATFORM_WIN32_KHR + template <> struct isStructureChainValid{ enum { value = true }; }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct 
isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; +#ifdef VK_USE_PLATFORM_ANDROID_ANDROID + template <> struct isStructureChainValid{ enum { value = true }; }; +#endif /*VK_USE_PLATFORM_ANDROID_ANDROID*/ +#ifdef VK_USE_PLATFORM_ANDROID_ANDROID + template <> struct isStructureChainValid{ enum { value = true }; }; +#endif /*VK_USE_PLATFORM_ANDROID_ANDROID*/ + template <> struct isStructureChainValid{ enum { value = true }; }; +#ifdef VK_USE_PLATFORM_ANDROID_ANDROID + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; +#endif /*VK_USE_PLATFORM_ANDROID_ANDROID*/ + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct 
isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; +#ifdef VK_USE_PLATFORM_WIN32_NV + template <> struct isStructureChainValid{ enum { value = true }; }; +#endif /*VK_USE_PLATFORM_WIN32_NV*/ + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + 
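+  // The isStructureChainValid specializations in this block record which
+  // structure types may legally extend one another through their pNext members;
+  // StructureChain relies on them to reject ill-formed chains at compile time.
+  // A minimal consumption sketch follows (illustrative only); it assumes the
+  // PhysicalDeviceProperties2 / PhysicalDeviceIDProperties pairing is among the
+  // combinations declared valid in this header and that the corresponding
+  // extension (or Vulkan 1.1) is available at runtime.
+#if 0
+  static void exampleQueryDeviceUUID( PhysicalDevice physicalDevice )
+  {
+    StructureChain<PhysicalDeviceProperties2, PhysicalDeviceIDProperties> chain;
+    physicalDevice.getProperties2( &chain.get<PhysicalDeviceProperties2>() );
+    PhysicalDeviceIDProperties const & idProperties = chain.get<PhysicalDeviceIDProperties>();
+    (void)idProperties.deviceUUID;  // 16-byte UUID identifying the physical device
+  }
+#endif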
template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; +#ifdef VK_USE_PLATFORM_WIN32_KHR + template <> struct isStructureChainValid{ enum { value = true }; }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; +#ifdef VK_USE_PLATFORM_ANDROID_ANDROID +#ifdef VK_USE_PLATFORM_ANDROID_ANDROID + template <> struct isStructureChainValid{ enum { value = true }; }; +#endif /*VK_USE_PLATFORM_ANDROID_ANDROID*/ +#endif /*VK_USE_PLATFORM_ANDROID_ANDROID*/ + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + VULKAN_HPP_INLINE std::string to_string(FramebufferCreateFlagBits) + { + return "(void)"; + } + + VULKAN_HPP_INLINE std::string to_string(FramebufferCreateFlags) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string(QueryPoolCreateFlagBits) + { + return "(void)"; + } + + VULKAN_HPP_INLINE std::string to_string(QueryPoolCreateFlags) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string(RenderPassCreateFlagBits) + { + return "(void)"; + } + + VULKAN_HPP_INLINE std::string to_string(RenderPassCreateFlags) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string(SamplerCreateFlagBits) + { + return "(void)"; + } + + VULKAN_HPP_INLINE std::string to_string(SamplerCreateFlags) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string(PipelineLayoutCreateFlagBits) + { + return "(void)"; + } + + VULKAN_HPP_INLINE std::string to_string(PipelineLayoutCreateFlags) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string(PipelineCacheCreateFlagBits) + { + return "(void)"; + } + + VULKAN_HPP_INLINE std::string to_string(PipelineCacheCreateFlags) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string(PipelineDepthStencilStateCreateFlagBits) + { 
+ return "(void)"; + } + + VULKAN_HPP_INLINE std::string to_string(PipelineDepthStencilStateCreateFlags) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string(PipelineDynamicStateCreateFlagBits) + { + return "(void)"; + } + + VULKAN_HPP_INLINE std::string to_string(PipelineDynamicStateCreateFlags) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string(PipelineColorBlendStateCreateFlagBits) + { + return "(void)"; + } + + VULKAN_HPP_INLINE std::string to_string(PipelineColorBlendStateCreateFlags) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string(PipelineMultisampleStateCreateFlagBits) + { + return "(void)"; + } + + VULKAN_HPP_INLINE std::string to_string(PipelineMultisampleStateCreateFlags) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string(PipelineRasterizationStateCreateFlagBits) + { + return "(void)"; + } + + VULKAN_HPP_INLINE std::string to_string(PipelineRasterizationStateCreateFlags) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string(PipelineViewportStateCreateFlagBits) + { + return "(void)"; + } + + VULKAN_HPP_INLINE std::string to_string(PipelineViewportStateCreateFlags) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string(PipelineTessellationStateCreateFlagBits) + { + return "(void)"; + } + + VULKAN_HPP_INLINE std::string to_string(PipelineTessellationStateCreateFlags) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string(PipelineInputAssemblyStateCreateFlagBits) + { + return "(void)"; + } + + VULKAN_HPP_INLINE std::string to_string(PipelineInputAssemblyStateCreateFlags) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string(PipelineVertexInputStateCreateFlagBits) + { + return "(void)"; + } + + VULKAN_HPP_INLINE std::string to_string(PipelineVertexInputStateCreateFlags) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string(PipelineShaderStageCreateFlagBits) + { + return "(void)"; + } + + VULKAN_HPP_INLINE std::string to_string(PipelineShaderStageCreateFlags) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string(BufferViewCreateFlagBits) + { + return "(void)"; + } + + VULKAN_HPP_INLINE std::string to_string(BufferViewCreateFlags) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string(InstanceCreateFlagBits) + { + return "(void)"; + } + + VULKAN_HPP_INLINE std::string to_string(InstanceCreateFlags) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string(DeviceCreateFlagBits) + { + return "(void)"; + } + + VULKAN_HPP_INLINE std::string to_string(DeviceCreateFlags) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string(ImageViewCreateFlagBits) + { + return "(void)"; + } + + VULKAN_HPP_INLINE std::string to_string(ImageViewCreateFlags) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string(SemaphoreCreateFlagBits) + { + return "(void)"; + } + + VULKAN_HPP_INLINE std::string to_string(SemaphoreCreateFlags) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string(ShaderModuleCreateFlagBits) + { + return "(void)"; + } + + VULKAN_HPP_INLINE std::string to_string(ShaderModuleCreateFlags) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string(EventCreateFlagBits) + { + return "(void)"; + } + + VULKAN_HPP_INLINE std::string to_string(EventCreateFlags) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string(MemoryMapFlagBits) + { + return "(void)"; + } + + VULKAN_HPP_INLINE std::string to_string(MemoryMapFlags) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string 
to_string(DescriptorPoolResetFlagBits) + { + return "(void)"; + } + + VULKAN_HPP_INLINE std::string to_string(DescriptorPoolResetFlags) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string(DescriptorUpdateTemplateCreateFlagBits) + { + return "(void)"; + } + + VULKAN_HPP_INLINE std::string to_string(DescriptorUpdateTemplateCreateFlags) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string(DisplayModeCreateFlagBitsKHR) + { + return "(void)"; + } + + VULKAN_HPP_INLINE std::string to_string(DisplayModeCreateFlagsKHR) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string(DisplaySurfaceCreateFlagBitsKHR) + { + return "(void)"; + } + + VULKAN_HPP_INLINE std::string to_string(DisplaySurfaceCreateFlagsKHR) + { + return "{}"; + } + +#ifdef VK_USE_PLATFORM_ANDROID_KHR + VULKAN_HPP_INLINE std::string to_string(AndroidSurfaceCreateFlagBitsKHR) + { + return "(void)"; + } +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +#ifdef VK_USE_PLATFORM_ANDROID_KHR + VULKAN_HPP_INLINE std::string to_string(AndroidSurfaceCreateFlagsKHR) + { + return "{}"; + } +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +#ifdef VK_USE_PLATFORM_VI_NN + VULKAN_HPP_INLINE std::string to_string(ViSurfaceCreateFlagBitsNN) + { + return "(void)"; + } +#endif /*VK_USE_PLATFORM_VI_NN*/ + +#ifdef VK_USE_PLATFORM_VI_NN + VULKAN_HPP_INLINE std::string to_string(ViSurfaceCreateFlagsNN) + { + return "{}"; + } +#endif /*VK_USE_PLATFORM_VI_NN*/ + +#ifdef VK_USE_PLATFORM_WAYLAND_KHR + VULKAN_HPP_INLINE std::string to_string(WaylandSurfaceCreateFlagBitsKHR) + { + return "(void)"; + } +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + +#ifdef VK_USE_PLATFORM_WAYLAND_KHR + VULKAN_HPP_INLINE std::string to_string(WaylandSurfaceCreateFlagsKHR) + { + return "{}"; + } +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + +#ifdef VK_USE_PLATFORM_WIN32_KHR + VULKAN_HPP_INLINE std::string to_string(Win32SurfaceCreateFlagBitsKHR) + { + return "(void)"; + } +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#ifdef VK_USE_PLATFORM_WIN32_KHR + VULKAN_HPP_INLINE std::string to_string(Win32SurfaceCreateFlagsKHR) + { + return "{}"; + } +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#ifdef VK_USE_PLATFORM_XLIB_KHR + VULKAN_HPP_INLINE std::string to_string(XlibSurfaceCreateFlagBitsKHR) + { + return "(void)"; + } +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ + +#ifdef VK_USE_PLATFORM_XLIB_KHR + VULKAN_HPP_INLINE std::string to_string(XlibSurfaceCreateFlagsKHR) + { + return "{}"; + } +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ + +#ifdef VK_USE_PLATFORM_XCB_KHR + VULKAN_HPP_INLINE std::string to_string(XcbSurfaceCreateFlagBitsKHR) + { + return "(void)"; + } +#endif /*VK_USE_PLATFORM_XCB_KHR*/ + +#ifdef VK_USE_PLATFORM_XCB_KHR + VULKAN_HPP_INLINE std::string to_string(XcbSurfaceCreateFlagsKHR) + { + return "{}"; + } +#endif /*VK_USE_PLATFORM_XCB_KHR*/ + +#ifdef VK_USE_PLATFORM_IOS_MVK + VULKAN_HPP_INLINE std::string to_string(IOSSurfaceCreateFlagBitsMVK) + { + return "(void)"; + } +#endif /*VK_USE_PLATFORM_IOS_MVK*/ + +#ifdef VK_USE_PLATFORM_IOS_MVK + VULKAN_HPP_INLINE std::string to_string(IOSSurfaceCreateFlagsMVK) + { + return "{}"; + } +#endif /*VK_USE_PLATFORM_IOS_MVK*/ + +#ifdef VK_USE_PLATFORM_MACOS_MVK + VULKAN_HPP_INLINE std::string to_string(MacOSSurfaceCreateFlagBitsMVK) + { + return "(void)"; + } +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ + +#ifdef VK_USE_PLATFORM_MACOS_MVK + VULKAN_HPP_INLINE std::string to_string(MacOSSurfaceCreateFlagsMVK) + { + return "{}"; + } +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ + +#ifdef VK_USE_PLATFORM_FUCHSIA_FUCHSIA + VULKAN_HPP_INLINE std::string 
to_string(ImagePipeSurfaceCreateFlagBitsFUCHSIA) + { + return "(void)"; + } +#endif /*VK_USE_PLATFORM_FUCHSIA_FUCHSIA*/ + +#ifdef VK_USE_PLATFORM_FUCHSIA_FUCHSIA + VULKAN_HPP_INLINE std::string to_string(ImagePipeSurfaceCreateFlagsFUCHSIA) + { + return "{}"; + } +#endif /*VK_USE_PLATFORM_FUCHSIA_FUCHSIA*/ + + VULKAN_HPP_INLINE std::string to_string(CommandPoolTrimFlagBits) + { + return "(void)"; + } + + VULKAN_HPP_INLINE std::string to_string(CommandPoolTrimFlags) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string(PipelineViewportSwizzleStateCreateFlagBitsNV) + { + return "(void)"; + } + + VULKAN_HPP_INLINE std::string to_string(PipelineViewportSwizzleStateCreateFlagsNV) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string(PipelineDiscardRectangleStateCreateFlagBitsEXT) + { + return "(void)"; + } + + VULKAN_HPP_INLINE std::string to_string(PipelineDiscardRectangleStateCreateFlagsEXT) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string(PipelineCoverageToColorStateCreateFlagBitsNV) + { + return "(void)"; + } + + VULKAN_HPP_INLINE std::string to_string(PipelineCoverageToColorStateCreateFlagsNV) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string(PipelineCoverageModulationStateCreateFlagBitsNV) + { + return "(void)"; + } + + VULKAN_HPP_INLINE std::string to_string(PipelineCoverageModulationStateCreateFlagsNV) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string(ValidationCacheCreateFlagBitsEXT) + { + return "(void)"; + } + + VULKAN_HPP_INLINE std::string to_string(ValidationCacheCreateFlagsEXT) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string(DebugUtilsMessengerCreateFlagBitsEXT) + { + return "(void)"; + } + + VULKAN_HPP_INLINE std::string to_string(DebugUtilsMessengerCreateFlagsEXT) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string(DebugUtilsMessengerCallbackDataFlagBitsEXT) + { + return "(void)"; + } + + VULKAN_HPP_INLINE std::string to_string(DebugUtilsMessengerCallbackDataFlagsEXT) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string(PipelineRasterizationConservativeStateCreateFlagBitsEXT) + { + return "(void)"; + } + + VULKAN_HPP_INLINE std::string to_string(PipelineRasterizationConservativeStateCreateFlagsEXT) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string(PipelineRasterizationStateStreamCreateFlagBitsEXT) + { + return "(void)"; + } + + VULKAN_HPP_INLINE std::string to_string(PipelineRasterizationStateStreamCreateFlagsEXT) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string(ImageLayout value) + { + switch (value) + { + case ImageLayout::eUndefined: return "Undefined"; + case ImageLayout::eGeneral: return "General"; + case ImageLayout::eColorAttachmentOptimal: return "ColorAttachmentOptimal"; + case ImageLayout::eDepthStencilAttachmentOptimal: return "DepthStencilAttachmentOptimal"; + case ImageLayout::eDepthStencilReadOnlyOptimal: return "DepthStencilReadOnlyOptimal"; + case ImageLayout::eShaderReadOnlyOptimal: return "ShaderReadOnlyOptimal"; + case ImageLayout::eTransferSrcOptimal: return "TransferSrcOptimal"; + case ImageLayout::eTransferDstOptimal: return "TransferDstOptimal"; + case ImageLayout::ePreinitialized: return "Preinitialized"; + case ImageLayout::eDepthReadOnlyStencilAttachmentOptimal: return "DepthReadOnlyStencilAttachmentOptimal"; + case ImageLayout::eDepthAttachmentStencilReadOnlyOptimal: return "DepthAttachmentStencilReadOnlyOptimal"; + case ImageLayout::ePresentSrcKHR: return "PresentSrcKHR"; + case 
ImageLayout::eSharedPresentKHR: return "SharedPresentKHR"; + case ImageLayout::eShadingRateOptimalNV: return "ShadingRateOptimalNV"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(AttachmentLoadOp value) + { + switch (value) + { + case AttachmentLoadOp::eLoad: return "Load"; + case AttachmentLoadOp::eClear: return "Clear"; + case AttachmentLoadOp::eDontCare: return "DontCare"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(AttachmentStoreOp value) + { + switch (value) + { + case AttachmentStoreOp::eStore: return "Store"; + case AttachmentStoreOp::eDontCare: return "DontCare"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(ImageType value) + { + switch (value) + { + case ImageType::e1D: return "1D"; + case ImageType::e2D: return "2D"; + case ImageType::e3D: return "3D"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(ImageTiling value) + { + switch (value) + { + case ImageTiling::eOptimal: return "Optimal"; + case ImageTiling::eLinear: return "Linear"; + case ImageTiling::eDrmFormatModifierEXT: return "DrmFormatModifierEXT"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(ImageViewType value) + { + switch (value) + { + case ImageViewType::e1D: return "1D"; + case ImageViewType::e2D: return "2D"; + case ImageViewType::e3D: return "3D"; + case ImageViewType::eCube: return "Cube"; + case ImageViewType::e1DArray: return "1DArray"; + case ImageViewType::e2DArray: return "2DArray"; + case ImageViewType::eCubeArray: return "CubeArray"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(CommandBufferLevel value) + { + switch (value) + { + case CommandBufferLevel::ePrimary: return "Primary"; + case CommandBufferLevel::eSecondary: return "Secondary"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(ComponentSwizzle value) + { + switch (value) + { + case ComponentSwizzle::eIdentity: return "Identity"; + case ComponentSwizzle::eZero: return "Zero"; + case ComponentSwizzle::eOne: return "One"; + case ComponentSwizzle::eR: return "R"; + case ComponentSwizzle::eG: return "G"; + case ComponentSwizzle::eB: return "B"; + case ComponentSwizzle::eA: return "A"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(DescriptorType value) + { + switch (value) + { + case DescriptorType::eSampler: return "Sampler"; + case DescriptorType::eCombinedImageSampler: return "CombinedImageSampler"; + case DescriptorType::eSampledImage: return "SampledImage"; + case DescriptorType::eStorageImage: return "StorageImage"; + case DescriptorType::eUniformTexelBuffer: return "UniformTexelBuffer"; + case DescriptorType::eStorageTexelBuffer: return "StorageTexelBuffer"; + case DescriptorType::eUniformBuffer: return "UniformBuffer"; + case DescriptorType::eStorageBuffer: return "StorageBuffer"; + case DescriptorType::eUniformBufferDynamic: return "UniformBufferDynamic"; + case DescriptorType::eStorageBufferDynamic: return "StorageBufferDynamic"; + case DescriptorType::eInputAttachment: return "InputAttachment"; + case DescriptorType::eInlineUniformBlockEXT: return "InlineUniformBlockEXT"; + case DescriptorType::eAccelerationStructureNV: return "AccelerationStructureNV"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(QueryType value) + { + switch (value) + { + case QueryType::eOcclusion: return "Occlusion"; + case QueryType::ePipelineStatistics: 
return "PipelineStatistics"; + case QueryType::eTimestamp: return "Timestamp"; + case QueryType::eTransformFeedbackStreamEXT: return "TransformFeedbackStreamEXT"; + case QueryType::eAccelerationStructureCompactedSizeNV: return "AccelerationStructureCompactedSizeNV"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(BorderColor value) + { + switch (value) + { + case BorderColor::eFloatTransparentBlack: return "FloatTransparentBlack"; + case BorderColor::eIntTransparentBlack: return "IntTransparentBlack"; + case BorderColor::eFloatOpaqueBlack: return "FloatOpaqueBlack"; + case BorderColor::eIntOpaqueBlack: return "IntOpaqueBlack"; + case BorderColor::eFloatOpaqueWhite: return "FloatOpaqueWhite"; + case BorderColor::eIntOpaqueWhite: return "IntOpaqueWhite"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(PipelineBindPoint value) + { + switch (value) + { + case PipelineBindPoint::eGraphics: return "Graphics"; + case PipelineBindPoint::eCompute: return "Compute"; + case PipelineBindPoint::eRayTracingNV: return "RayTracingNV"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(PipelineCacheHeaderVersion value) + { + switch (value) + { + case PipelineCacheHeaderVersion::eOne: return "One"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(PrimitiveTopology value) + { + switch (value) + { + case PrimitiveTopology::ePointList: return "PointList"; + case PrimitiveTopology::eLineList: return "LineList"; + case PrimitiveTopology::eLineStrip: return "LineStrip"; + case PrimitiveTopology::eTriangleList: return "TriangleList"; + case PrimitiveTopology::eTriangleStrip: return "TriangleStrip"; + case PrimitiveTopology::eTriangleFan: return "TriangleFan"; + case PrimitiveTopology::eLineListWithAdjacency: return "LineListWithAdjacency"; + case PrimitiveTopology::eLineStripWithAdjacency: return "LineStripWithAdjacency"; + case PrimitiveTopology::eTriangleListWithAdjacency: return "TriangleListWithAdjacency"; + case PrimitiveTopology::eTriangleStripWithAdjacency: return "TriangleStripWithAdjacency"; + case PrimitiveTopology::ePatchList: return "PatchList"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(SharingMode value) + { + switch (value) + { + case SharingMode::eExclusive: return "Exclusive"; + case SharingMode::eConcurrent: return "Concurrent"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(IndexType value) + { + switch (value) + { + case IndexType::eUint16: return "Uint16"; + case IndexType::eUint32: return "Uint32"; + case IndexType::eNoneNV: return "NoneNV"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(Filter value) + { + switch (value) + { + case Filter::eNearest: return "Nearest"; + case Filter::eLinear: return "Linear"; + case Filter::eCubicIMG: return "CubicIMG"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(SamplerMipmapMode value) + { + switch (value) + { + case SamplerMipmapMode::eNearest: return "Nearest"; + case SamplerMipmapMode::eLinear: return "Linear"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(SamplerAddressMode value) + { + switch (value) + { + case SamplerAddressMode::eRepeat: return "Repeat"; + case SamplerAddressMode::eMirroredRepeat: return "MirroredRepeat"; + case SamplerAddressMode::eClampToEdge: return "ClampToEdge"; + case SamplerAddressMode::eClampToBorder: return "ClampToBorder"; + 
case SamplerAddressMode::eMirrorClampToEdge: return "MirrorClampToEdge"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(CompareOp value) + { + switch (value) + { + case CompareOp::eNever: return "Never"; + case CompareOp::eLess: return "Less"; + case CompareOp::eEqual: return "Equal"; + case CompareOp::eLessOrEqual: return "LessOrEqual"; + case CompareOp::eGreater: return "Greater"; + case CompareOp::eNotEqual: return "NotEqual"; + case CompareOp::eGreaterOrEqual: return "GreaterOrEqual"; + case CompareOp::eAlways: return "Always"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(PolygonMode value) + { + switch (value) + { + case PolygonMode::eFill: return "Fill"; + case PolygonMode::eLine: return "Line"; + case PolygonMode::ePoint: return "Point"; + case PolygonMode::eFillRectangleNV: return "FillRectangleNV"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(CullModeFlagBits value) + { + switch (value) + { + case CullModeFlagBits::eNone: return "None"; + case CullModeFlagBits::eFront: return "Front"; + case CullModeFlagBits::eBack: return "Back"; + case CullModeFlagBits::eFrontAndBack: return "FrontAndBack"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(CullModeFlags value) + { + if (!value) return "{}"; + std::string result; + if (value & CullModeFlagBits::eNone) result += "None | "; + if (value & CullModeFlagBits::eFront) result += "Front | "; + if (value & CullModeFlagBits::eBack) result += "Back | "; + if (value & CullModeFlagBits::eFrontAndBack) result += "FrontAndBack | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(FrontFace value) + { + switch (value) + { + case FrontFace::eCounterClockwise: return "CounterClockwise"; + case FrontFace::eClockwise: return "Clockwise"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(BlendFactor value) + { + switch (value) + { + case BlendFactor::eZero: return "Zero"; + case BlendFactor::eOne: return "One"; + case BlendFactor::eSrcColor: return "SrcColor"; + case BlendFactor::eOneMinusSrcColor: return "OneMinusSrcColor"; + case BlendFactor::eDstColor: return "DstColor"; + case BlendFactor::eOneMinusDstColor: return "OneMinusDstColor"; + case BlendFactor::eSrcAlpha: return "SrcAlpha"; + case BlendFactor::eOneMinusSrcAlpha: return "OneMinusSrcAlpha"; + case BlendFactor::eDstAlpha: return "DstAlpha"; + case BlendFactor::eOneMinusDstAlpha: return "OneMinusDstAlpha"; + case BlendFactor::eConstantColor: return "ConstantColor"; + case BlendFactor::eOneMinusConstantColor: return "OneMinusConstantColor"; + case BlendFactor::eConstantAlpha: return "ConstantAlpha"; + case BlendFactor::eOneMinusConstantAlpha: return "OneMinusConstantAlpha"; + case BlendFactor::eSrcAlphaSaturate: return "SrcAlphaSaturate"; + case BlendFactor::eSrc1Color: return "Src1Color"; + case BlendFactor::eOneMinusSrc1Color: return "OneMinusSrc1Color"; + case BlendFactor::eSrc1Alpha: return "Src1Alpha"; + case BlendFactor::eOneMinusSrc1Alpha: return "OneMinusSrc1Alpha"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(BlendOp value) + { + switch (value) + { + case BlendOp::eAdd: return "Add"; + case BlendOp::eSubtract: return "Subtract"; + case BlendOp::eReverseSubtract: return "ReverseSubtract"; + case BlendOp::eMin: return "Min"; + case BlendOp::eMax: return "Max"; + case BlendOp::eZeroEXT: return "ZeroEXT"; + case BlendOp::eSrcEXT: 
return "SrcEXT"; + case BlendOp::eDstEXT: return "DstEXT"; + case BlendOp::eSrcOverEXT: return "SrcOverEXT"; + case BlendOp::eDstOverEXT: return "DstOverEXT"; + case BlendOp::eSrcInEXT: return "SrcInEXT"; + case BlendOp::eDstInEXT: return "DstInEXT"; + case BlendOp::eSrcOutEXT: return "SrcOutEXT"; + case BlendOp::eDstOutEXT: return "DstOutEXT"; + case BlendOp::eSrcAtopEXT: return "SrcAtopEXT"; + case BlendOp::eDstAtopEXT: return "DstAtopEXT"; + case BlendOp::eXorEXT: return "XorEXT"; + case BlendOp::eMultiplyEXT: return "MultiplyEXT"; + case BlendOp::eScreenEXT: return "ScreenEXT"; + case BlendOp::eOverlayEXT: return "OverlayEXT"; + case BlendOp::eDarkenEXT: return "DarkenEXT"; + case BlendOp::eLightenEXT: return "LightenEXT"; + case BlendOp::eColordodgeEXT: return "ColordodgeEXT"; + case BlendOp::eColorburnEXT: return "ColorburnEXT"; + case BlendOp::eHardlightEXT: return "HardlightEXT"; + case BlendOp::eSoftlightEXT: return "SoftlightEXT"; + case BlendOp::eDifferenceEXT: return "DifferenceEXT"; + case BlendOp::eExclusionEXT: return "ExclusionEXT"; + case BlendOp::eInvertEXT: return "InvertEXT"; + case BlendOp::eInvertRgbEXT: return "InvertRgbEXT"; + case BlendOp::eLineardodgeEXT: return "LineardodgeEXT"; + case BlendOp::eLinearburnEXT: return "LinearburnEXT"; + case BlendOp::eVividlightEXT: return "VividlightEXT"; + case BlendOp::eLinearlightEXT: return "LinearlightEXT"; + case BlendOp::ePinlightEXT: return "PinlightEXT"; + case BlendOp::eHardmixEXT: return "HardmixEXT"; + case BlendOp::eHslHueEXT: return "HslHueEXT"; + case BlendOp::eHslSaturationEXT: return "HslSaturationEXT"; + case BlendOp::eHslColorEXT: return "HslColorEXT"; + case BlendOp::eHslLuminosityEXT: return "HslLuminosityEXT"; + case BlendOp::ePlusEXT: return "PlusEXT"; + case BlendOp::ePlusClampedEXT: return "PlusClampedEXT"; + case BlendOp::ePlusClampedAlphaEXT: return "PlusClampedAlphaEXT"; + case BlendOp::ePlusDarkerEXT: return "PlusDarkerEXT"; + case BlendOp::eMinusEXT: return "MinusEXT"; + case BlendOp::eMinusClampedEXT: return "MinusClampedEXT"; + case BlendOp::eContrastEXT: return "ContrastEXT"; + case BlendOp::eInvertOvgEXT: return "InvertOvgEXT"; + case BlendOp::eRedEXT: return "RedEXT"; + case BlendOp::eGreenEXT: return "GreenEXT"; + case BlendOp::eBlueEXT: return "BlueEXT"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(StencilOp value) + { + switch (value) + { + case StencilOp::eKeep: return "Keep"; + case StencilOp::eZero: return "Zero"; + case StencilOp::eReplace: return "Replace"; + case StencilOp::eIncrementAndClamp: return "IncrementAndClamp"; + case StencilOp::eDecrementAndClamp: return "DecrementAndClamp"; + case StencilOp::eInvert: return "Invert"; + case StencilOp::eIncrementAndWrap: return "IncrementAndWrap"; + case StencilOp::eDecrementAndWrap: return "DecrementAndWrap"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(LogicOp value) + { + switch (value) + { + case LogicOp::eClear: return "Clear"; + case LogicOp::eAnd: return "And"; + case LogicOp::eAndReverse: return "AndReverse"; + case LogicOp::eCopy: return "Copy"; + case LogicOp::eAndInverted: return "AndInverted"; + case LogicOp::eNoOp: return "NoOp"; + case LogicOp::eXor: return "Xor"; + case LogicOp::eOr: return "Or"; + case LogicOp::eNor: return "Nor"; + case LogicOp::eEquivalent: return "Equivalent"; + case LogicOp::eInvert: return "Invert"; + case LogicOp::eOrReverse: return "OrReverse"; + case LogicOp::eCopyInverted: return "CopyInverted"; + case LogicOp::eOrInverted: return 
"OrInverted"; + case LogicOp::eNand: return "Nand"; + case LogicOp::eSet: return "Set"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(InternalAllocationType value) + { + switch (value) + { + case InternalAllocationType::eExecutable: return "Executable"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(SystemAllocationScope value) + { + switch (value) + { + case SystemAllocationScope::eCommand: return "Command"; + case SystemAllocationScope::eObject: return "Object"; + case SystemAllocationScope::eCache: return "Cache"; + case SystemAllocationScope::eDevice: return "Device"; + case SystemAllocationScope::eInstance: return "Instance"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(PhysicalDeviceType value) + { + switch (value) + { + case PhysicalDeviceType::eOther: return "Other"; + case PhysicalDeviceType::eIntegratedGpu: return "IntegratedGpu"; + case PhysicalDeviceType::eDiscreteGpu: return "DiscreteGpu"; + case PhysicalDeviceType::eVirtualGpu: return "VirtualGpu"; + case PhysicalDeviceType::eCpu: return "Cpu"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(VertexInputRate value) + { + switch (value) + { + case VertexInputRate::eVertex: return "Vertex"; + case VertexInputRate::eInstance: return "Instance"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(Format value) + { + switch (value) + { + case Format::eUndefined: return "Undefined"; + case Format::eR4G4UnormPack8: return "R4G4UnormPack8"; + case Format::eR4G4B4A4UnormPack16: return "R4G4B4A4UnormPack16"; + case Format::eB4G4R4A4UnormPack16: return "B4G4R4A4UnormPack16"; + case Format::eR5G6B5UnormPack16: return "R5G6B5UnormPack16"; + case Format::eB5G6R5UnormPack16: return "B5G6R5UnormPack16"; + case Format::eR5G5B5A1UnormPack16: return "R5G5B5A1UnormPack16"; + case Format::eB5G5R5A1UnormPack16: return "B5G5R5A1UnormPack16"; + case Format::eA1R5G5B5UnormPack16: return "A1R5G5B5UnormPack16"; + case Format::eR8Unorm: return "R8Unorm"; + case Format::eR8Snorm: return "R8Snorm"; + case Format::eR8Uscaled: return "R8Uscaled"; + case Format::eR8Sscaled: return "R8Sscaled"; + case Format::eR8Uint: return "R8Uint"; + case Format::eR8Sint: return "R8Sint"; + case Format::eR8Srgb: return "R8Srgb"; + case Format::eR8G8Unorm: return "R8G8Unorm"; + case Format::eR8G8Snorm: return "R8G8Snorm"; + case Format::eR8G8Uscaled: return "R8G8Uscaled"; + case Format::eR8G8Sscaled: return "R8G8Sscaled"; + case Format::eR8G8Uint: return "R8G8Uint"; + case Format::eR8G8Sint: return "R8G8Sint"; + case Format::eR8G8Srgb: return "R8G8Srgb"; + case Format::eR8G8B8Unorm: return "R8G8B8Unorm"; + case Format::eR8G8B8Snorm: return "R8G8B8Snorm"; + case Format::eR8G8B8Uscaled: return "R8G8B8Uscaled"; + case Format::eR8G8B8Sscaled: return "R8G8B8Sscaled"; + case Format::eR8G8B8Uint: return "R8G8B8Uint"; + case Format::eR8G8B8Sint: return "R8G8B8Sint"; + case Format::eR8G8B8Srgb: return "R8G8B8Srgb"; + case Format::eB8G8R8Unorm: return "B8G8R8Unorm"; + case Format::eB8G8R8Snorm: return "B8G8R8Snorm"; + case Format::eB8G8R8Uscaled: return "B8G8R8Uscaled"; + case Format::eB8G8R8Sscaled: return "B8G8R8Sscaled"; + case Format::eB8G8R8Uint: return "B8G8R8Uint"; + case Format::eB8G8R8Sint: return "B8G8R8Sint"; + case Format::eB8G8R8Srgb: return "B8G8R8Srgb"; + case Format::eR8G8B8A8Unorm: return "R8G8B8A8Unorm"; + case Format::eR8G8B8A8Snorm: return "R8G8B8A8Snorm"; + case Format::eR8G8B8A8Uscaled: return 
"R8G8B8A8Uscaled"; + case Format::eR8G8B8A8Sscaled: return "R8G8B8A8Sscaled"; + case Format::eR8G8B8A8Uint: return "R8G8B8A8Uint"; + case Format::eR8G8B8A8Sint: return "R8G8B8A8Sint"; + case Format::eR8G8B8A8Srgb: return "R8G8B8A8Srgb"; + case Format::eB8G8R8A8Unorm: return "B8G8R8A8Unorm"; + case Format::eB8G8R8A8Snorm: return "B8G8R8A8Snorm"; + case Format::eB8G8R8A8Uscaled: return "B8G8R8A8Uscaled"; + case Format::eB8G8R8A8Sscaled: return "B8G8R8A8Sscaled"; + case Format::eB8G8R8A8Uint: return "B8G8R8A8Uint"; + case Format::eB8G8R8A8Sint: return "B8G8R8A8Sint"; + case Format::eB8G8R8A8Srgb: return "B8G8R8A8Srgb"; + case Format::eA8B8G8R8UnormPack32: return "A8B8G8R8UnormPack32"; + case Format::eA8B8G8R8SnormPack32: return "A8B8G8R8SnormPack32"; + case Format::eA8B8G8R8UscaledPack32: return "A8B8G8R8UscaledPack32"; + case Format::eA8B8G8R8SscaledPack32: return "A8B8G8R8SscaledPack32"; + case Format::eA8B8G8R8UintPack32: return "A8B8G8R8UintPack32"; + case Format::eA8B8G8R8SintPack32: return "A8B8G8R8SintPack32"; + case Format::eA8B8G8R8SrgbPack32: return "A8B8G8R8SrgbPack32"; + case Format::eA2R10G10B10UnormPack32: return "A2R10G10B10UnormPack32"; + case Format::eA2R10G10B10SnormPack32: return "A2R10G10B10SnormPack32"; + case Format::eA2R10G10B10UscaledPack32: return "A2R10G10B10UscaledPack32"; + case Format::eA2R10G10B10SscaledPack32: return "A2R10G10B10SscaledPack32"; + case Format::eA2R10G10B10UintPack32: return "A2R10G10B10UintPack32"; + case Format::eA2R10G10B10SintPack32: return "A2R10G10B10SintPack32"; + case Format::eA2B10G10R10UnormPack32: return "A2B10G10R10UnormPack32"; + case Format::eA2B10G10R10SnormPack32: return "A2B10G10R10SnormPack32"; + case Format::eA2B10G10R10UscaledPack32: return "A2B10G10R10UscaledPack32"; + case Format::eA2B10G10R10SscaledPack32: return "A2B10G10R10SscaledPack32"; + case Format::eA2B10G10R10UintPack32: return "A2B10G10R10UintPack32"; + case Format::eA2B10G10R10SintPack32: return "A2B10G10R10SintPack32"; + case Format::eR16Unorm: return "R16Unorm"; + case Format::eR16Snorm: return "R16Snorm"; + case Format::eR16Uscaled: return "R16Uscaled"; + case Format::eR16Sscaled: return "R16Sscaled"; + case Format::eR16Uint: return "R16Uint"; + case Format::eR16Sint: return "R16Sint"; + case Format::eR16Sfloat: return "R16Sfloat"; + case Format::eR16G16Unorm: return "R16G16Unorm"; + case Format::eR16G16Snorm: return "R16G16Snorm"; + case Format::eR16G16Uscaled: return "R16G16Uscaled"; + case Format::eR16G16Sscaled: return "R16G16Sscaled"; + case Format::eR16G16Uint: return "R16G16Uint"; + case Format::eR16G16Sint: return "R16G16Sint"; + case Format::eR16G16Sfloat: return "R16G16Sfloat"; + case Format::eR16G16B16Unorm: return "R16G16B16Unorm"; + case Format::eR16G16B16Snorm: return "R16G16B16Snorm"; + case Format::eR16G16B16Uscaled: return "R16G16B16Uscaled"; + case Format::eR16G16B16Sscaled: return "R16G16B16Sscaled"; + case Format::eR16G16B16Uint: return "R16G16B16Uint"; + case Format::eR16G16B16Sint: return "R16G16B16Sint"; + case Format::eR16G16B16Sfloat: return "R16G16B16Sfloat"; + case Format::eR16G16B16A16Unorm: return "R16G16B16A16Unorm"; + case Format::eR16G16B16A16Snorm: return "R16G16B16A16Snorm"; + case Format::eR16G16B16A16Uscaled: return "R16G16B16A16Uscaled"; + case Format::eR16G16B16A16Sscaled: return "R16G16B16A16Sscaled"; + case Format::eR16G16B16A16Uint: return "R16G16B16A16Uint"; + case Format::eR16G16B16A16Sint: return "R16G16B16A16Sint"; + case Format::eR16G16B16A16Sfloat: return "R16G16B16A16Sfloat"; + case Format::eR32Uint: return 
"R32Uint"; + case Format::eR32Sint: return "R32Sint"; + case Format::eR32Sfloat: return "R32Sfloat"; + case Format::eR32G32Uint: return "R32G32Uint"; + case Format::eR32G32Sint: return "R32G32Sint"; + case Format::eR32G32Sfloat: return "R32G32Sfloat"; + case Format::eR32G32B32Uint: return "R32G32B32Uint"; + case Format::eR32G32B32Sint: return "R32G32B32Sint"; + case Format::eR32G32B32Sfloat: return "R32G32B32Sfloat"; + case Format::eR32G32B32A32Uint: return "R32G32B32A32Uint"; + case Format::eR32G32B32A32Sint: return "R32G32B32A32Sint"; + case Format::eR32G32B32A32Sfloat: return "R32G32B32A32Sfloat"; + case Format::eR64Uint: return "R64Uint"; + case Format::eR64Sint: return "R64Sint"; + case Format::eR64Sfloat: return "R64Sfloat"; + case Format::eR64G64Uint: return "R64G64Uint"; + case Format::eR64G64Sint: return "R64G64Sint"; + case Format::eR64G64Sfloat: return "R64G64Sfloat"; + case Format::eR64G64B64Uint: return "R64G64B64Uint"; + case Format::eR64G64B64Sint: return "R64G64B64Sint"; + case Format::eR64G64B64Sfloat: return "R64G64B64Sfloat"; + case Format::eR64G64B64A64Uint: return "R64G64B64A64Uint"; + case Format::eR64G64B64A64Sint: return "R64G64B64A64Sint"; + case Format::eR64G64B64A64Sfloat: return "R64G64B64A64Sfloat"; + case Format::eB10G11R11UfloatPack32: return "B10G11R11UfloatPack32"; + case Format::eE5B9G9R9UfloatPack32: return "E5B9G9R9UfloatPack32"; + case Format::eD16Unorm: return "D16Unorm"; + case Format::eX8D24UnormPack32: return "X8D24UnormPack32"; + case Format::eD32Sfloat: return "D32Sfloat"; + case Format::eS8Uint: return "S8Uint"; + case Format::eD16UnormS8Uint: return "D16UnormS8Uint"; + case Format::eD24UnormS8Uint: return "D24UnormS8Uint"; + case Format::eD32SfloatS8Uint: return "D32SfloatS8Uint"; + case Format::eBc1RgbUnormBlock: return "Bc1RgbUnormBlock"; + case Format::eBc1RgbSrgbBlock: return "Bc1RgbSrgbBlock"; + case Format::eBc1RgbaUnormBlock: return "Bc1RgbaUnormBlock"; + case Format::eBc1RgbaSrgbBlock: return "Bc1RgbaSrgbBlock"; + case Format::eBc2UnormBlock: return "Bc2UnormBlock"; + case Format::eBc2SrgbBlock: return "Bc2SrgbBlock"; + case Format::eBc3UnormBlock: return "Bc3UnormBlock"; + case Format::eBc3SrgbBlock: return "Bc3SrgbBlock"; + case Format::eBc4UnormBlock: return "Bc4UnormBlock"; + case Format::eBc4SnormBlock: return "Bc4SnormBlock"; + case Format::eBc5UnormBlock: return "Bc5UnormBlock"; + case Format::eBc5SnormBlock: return "Bc5SnormBlock"; + case Format::eBc6HUfloatBlock: return "Bc6HUfloatBlock"; + case Format::eBc6HSfloatBlock: return "Bc6HSfloatBlock"; + case Format::eBc7UnormBlock: return "Bc7UnormBlock"; + case Format::eBc7SrgbBlock: return "Bc7SrgbBlock"; + case Format::eEtc2R8G8B8UnormBlock: return "Etc2R8G8B8UnormBlock"; + case Format::eEtc2R8G8B8SrgbBlock: return "Etc2R8G8B8SrgbBlock"; + case Format::eEtc2R8G8B8A1UnormBlock: return "Etc2R8G8B8A1UnormBlock"; + case Format::eEtc2R8G8B8A1SrgbBlock: return "Etc2R8G8B8A1SrgbBlock"; + case Format::eEtc2R8G8B8A8UnormBlock: return "Etc2R8G8B8A8UnormBlock"; + case Format::eEtc2R8G8B8A8SrgbBlock: return "Etc2R8G8B8A8SrgbBlock"; + case Format::eEacR11UnormBlock: return "EacR11UnormBlock"; + case Format::eEacR11SnormBlock: return "EacR11SnormBlock"; + case Format::eEacR11G11UnormBlock: return "EacR11G11UnormBlock"; + case Format::eEacR11G11SnormBlock: return "EacR11G11SnormBlock"; + case Format::eAstc4x4UnormBlock: return "Astc4x4UnormBlock"; + case Format::eAstc4x4SrgbBlock: return "Astc4x4SrgbBlock"; + case Format::eAstc5x4UnormBlock: return "Astc5x4UnormBlock"; + case 
Format::eAstc5x4SrgbBlock: return "Astc5x4SrgbBlock"; + case Format::eAstc5x5UnormBlock: return "Astc5x5UnormBlock"; + case Format::eAstc5x5SrgbBlock: return "Astc5x5SrgbBlock"; + case Format::eAstc6x5UnormBlock: return "Astc6x5UnormBlock"; + case Format::eAstc6x5SrgbBlock: return "Astc6x5SrgbBlock"; + case Format::eAstc6x6UnormBlock: return "Astc6x6UnormBlock"; + case Format::eAstc6x6SrgbBlock: return "Astc6x6SrgbBlock"; + case Format::eAstc8x5UnormBlock: return "Astc8x5UnormBlock"; + case Format::eAstc8x5SrgbBlock: return "Astc8x5SrgbBlock"; + case Format::eAstc8x6UnormBlock: return "Astc8x6UnormBlock"; + case Format::eAstc8x6SrgbBlock: return "Astc8x6SrgbBlock"; + case Format::eAstc8x8UnormBlock: return "Astc8x8UnormBlock"; + case Format::eAstc8x8SrgbBlock: return "Astc8x8SrgbBlock"; + case Format::eAstc10x5UnormBlock: return "Astc10x5UnormBlock"; + case Format::eAstc10x5SrgbBlock: return "Astc10x5SrgbBlock"; + case Format::eAstc10x6UnormBlock: return "Astc10x6UnormBlock"; + case Format::eAstc10x6SrgbBlock: return "Astc10x6SrgbBlock"; + case Format::eAstc10x8UnormBlock: return "Astc10x8UnormBlock"; + case Format::eAstc10x8SrgbBlock: return "Astc10x8SrgbBlock"; + case Format::eAstc10x10UnormBlock: return "Astc10x10UnormBlock"; + case Format::eAstc10x10SrgbBlock: return "Astc10x10SrgbBlock"; + case Format::eAstc12x10UnormBlock: return "Astc12x10UnormBlock"; + case Format::eAstc12x10SrgbBlock: return "Astc12x10SrgbBlock"; + case Format::eAstc12x12UnormBlock: return "Astc12x12UnormBlock"; + case Format::eAstc12x12SrgbBlock: return "Astc12x12SrgbBlock"; + case Format::eG8B8G8R8422Unorm: return "G8B8G8R8422Unorm"; + case Format::eB8G8R8G8422Unorm: return "B8G8R8G8422Unorm"; + case Format::eG8B8R83Plane420Unorm: return "G8B8R83Plane420Unorm"; + case Format::eG8B8R82Plane420Unorm: return "G8B8R82Plane420Unorm"; + case Format::eG8B8R83Plane422Unorm: return "G8B8R83Plane422Unorm"; + case Format::eG8B8R82Plane422Unorm: return "G8B8R82Plane422Unorm"; + case Format::eG8B8R83Plane444Unorm: return "G8B8R83Plane444Unorm"; + case Format::eR10X6UnormPack16: return "R10X6UnormPack16"; + case Format::eR10X6G10X6Unorm2Pack16: return "R10X6G10X6Unorm2Pack16"; + case Format::eR10X6G10X6B10X6A10X6Unorm4Pack16: return "R10X6G10X6B10X6A10X6Unorm4Pack16"; + case Format::eG10X6B10X6G10X6R10X6422Unorm4Pack16: return "G10X6B10X6G10X6R10X6422Unorm4Pack16"; + case Format::eB10X6G10X6R10X6G10X6422Unorm4Pack16: return "B10X6G10X6R10X6G10X6422Unorm4Pack16"; + case Format::eG10X6B10X6R10X63Plane420Unorm3Pack16: return "G10X6B10X6R10X63Plane420Unorm3Pack16"; + case Format::eG10X6B10X6R10X62Plane420Unorm3Pack16: return "G10X6B10X6R10X62Plane420Unorm3Pack16"; + case Format::eG10X6B10X6R10X63Plane422Unorm3Pack16: return "G10X6B10X6R10X63Plane422Unorm3Pack16"; + case Format::eG10X6B10X6R10X62Plane422Unorm3Pack16: return "G10X6B10X6R10X62Plane422Unorm3Pack16"; + case Format::eG10X6B10X6R10X63Plane444Unorm3Pack16: return "G10X6B10X6R10X63Plane444Unorm3Pack16"; + case Format::eR12X4UnormPack16: return "R12X4UnormPack16"; + case Format::eR12X4G12X4Unorm2Pack16: return "R12X4G12X4Unorm2Pack16"; + case Format::eR12X4G12X4B12X4A12X4Unorm4Pack16: return "R12X4G12X4B12X4A12X4Unorm4Pack16"; + case Format::eG12X4B12X4G12X4R12X4422Unorm4Pack16: return "G12X4B12X4G12X4R12X4422Unorm4Pack16"; + case Format::eB12X4G12X4R12X4G12X4422Unorm4Pack16: return "B12X4G12X4R12X4G12X4422Unorm4Pack16"; + case Format::eG12X4B12X4R12X43Plane420Unorm3Pack16: return "G12X4B12X4R12X43Plane420Unorm3Pack16"; + case Format::eG12X4B12X4R12X42Plane420Unorm3Pack16: 
return "G12X4B12X4R12X42Plane420Unorm3Pack16"; + case Format::eG12X4B12X4R12X43Plane422Unorm3Pack16: return "G12X4B12X4R12X43Plane422Unorm3Pack16"; + case Format::eG12X4B12X4R12X42Plane422Unorm3Pack16: return "G12X4B12X4R12X42Plane422Unorm3Pack16"; + case Format::eG12X4B12X4R12X43Plane444Unorm3Pack16: return "G12X4B12X4R12X43Plane444Unorm3Pack16"; + case Format::eG16B16G16R16422Unorm: return "G16B16G16R16422Unorm"; + case Format::eB16G16R16G16422Unorm: return "B16G16R16G16422Unorm"; + case Format::eG16B16R163Plane420Unorm: return "G16B16R163Plane420Unorm"; + case Format::eG16B16R162Plane420Unorm: return "G16B16R162Plane420Unorm"; + case Format::eG16B16R163Plane422Unorm: return "G16B16R163Plane422Unorm"; + case Format::eG16B16R162Plane422Unorm: return "G16B16R162Plane422Unorm"; + case Format::eG16B16R163Plane444Unorm: return "G16B16R163Plane444Unorm"; + case Format::ePvrtc12BppUnormBlockIMG: return "Pvrtc12BppUnormBlockIMG"; + case Format::ePvrtc14BppUnormBlockIMG: return "Pvrtc14BppUnormBlockIMG"; + case Format::ePvrtc22BppUnormBlockIMG: return "Pvrtc22BppUnormBlockIMG"; + case Format::ePvrtc24BppUnormBlockIMG: return "Pvrtc24BppUnormBlockIMG"; + case Format::ePvrtc12BppSrgbBlockIMG: return "Pvrtc12BppSrgbBlockIMG"; + case Format::ePvrtc14BppSrgbBlockIMG: return "Pvrtc14BppSrgbBlockIMG"; + case Format::ePvrtc22BppSrgbBlockIMG: return "Pvrtc22BppSrgbBlockIMG"; + case Format::ePvrtc24BppSrgbBlockIMG: return "Pvrtc24BppSrgbBlockIMG"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(StructureType value) + { + switch (value) + { + case StructureType::eApplicationInfo: return "ApplicationInfo"; + case StructureType::eInstanceCreateInfo: return "InstanceCreateInfo"; + case StructureType::eDeviceQueueCreateInfo: return "DeviceQueueCreateInfo"; + case StructureType::eDeviceCreateInfo: return "DeviceCreateInfo"; + case StructureType::eSubmitInfo: return "SubmitInfo"; + case StructureType::eMemoryAllocateInfo: return "MemoryAllocateInfo"; + case StructureType::eMappedMemoryRange: return "MappedMemoryRange"; + case StructureType::eBindSparseInfo: return "BindSparseInfo"; + case StructureType::eFenceCreateInfo: return "FenceCreateInfo"; + case StructureType::eSemaphoreCreateInfo: return "SemaphoreCreateInfo"; + case StructureType::eEventCreateInfo: return "EventCreateInfo"; + case StructureType::eQueryPoolCreateInfo: return "QueryPoolCreateInfo"; + case StructureType::eBufferCreateInfo: return "BufferCreateInfo"; + case StructureType::eBufferViewCreateInfo: return "BufferViewCreateInfo"; + case StructureType::eImageCreateInfo: return "ImageCreateInfo"; + case StructureType::eImageViewCreateInfo: return "ImageViewCreateInfo"; + case StructureType::eShaderModuleCreateInfo: return "ShaderModuleCreateInfo"; + case StructureType::ePipelineCacheCreateInfo: return "PipelineCacheCreateInfo"; + case StructureType::ePipelineShaderStageCreateInfo: return "PipelineShaderStageCreateInfo"; + case StructureType::ePipelineVertexInputStateCreateInfo: return "PipelineVertexInputStateCreateInfo"; + case StructureType::ePipelineInputAssemblyStateCreateInfo: return "PipelineInputAssemblyStateCreateInfo"; + case StructureType::ePipelineTessellationStateCreateInfo: return "PipelineTessellationStateCreateInfo"; + case StructureType::ePipelineViewportStateCreateInfo: return "PipelineViewportStateCreateInfo"; + case StructureType::ePipelineRasterizationStateCreateInfo: return "PipelineRasterizationStateCreateInfo"; + case StructureType::ePipelineMultisampleStateCreateInfo: return 
"PipelineMultisampleStateCreateInfo"; + case StructureType::ePipelineDepthStencilStateCreateInfo: return "PipelineDepthStencilStateCreateInfo"; + case StructureType::ePipelineColorBlendStateCreateInfo: return "PipelineColorBlendStateCreateInfo"; + case StructureType::ePipelineDynamicStateCreateInfo: return "PipelineDynamicStateCreateInfo"; + case StructureType::eGraphicsPipelineCreateInfo: return "GraphicsPipelineCreateInfo"; + case StructureType::eComputePipelineCreateInfo: return "ComputePipelineCreateInfo"; + case StructureType::ePipelineLayoutCreateInfo: return "PipelineLayoutCreateInfo"; + case StructureType::eSamplerCreateInfo: return "SamplerCreateInfo"; + case StructureType::eDescriptorSetLayoutCreateInfo: return "DescriptorSetLayoutCreateInfo"; + case StructureType::eDescriptorPoolCreateInfo: return "DescriptorPoolCreateInfo"; + case StructureType::eDescriptorSetAllocateInfo: return "DescriptorSetAllocateInfo"; + case StructureType::eWriteDescriptorSet: return "WriteDescriptorSet"; + case StructureType::eCopyDescriptorSet: return "CopyDescriptorSet"; + case StructureType::eFramebufferCreateInfo: return "FramebufferCreateInfo"; + case StructureType::eRenderPassCreateInfo: return "RenderPassCreateInfo"; + case StructureType::eCommandPoolCreateInfo: return "CommandPoolCreateInfo"; + case StructureType::eCommandBufferAllocateInfo: return "CommandBufferAllocateInfo"; + case StructureType::eCommandBufferInheritanceInfo: return "CommandBufferInheritanceInfo"; + case StructureType::eCommandBufferBeginInfo: return "CommandBufferBeginInfo"; + case StructureType::eRenderPassBeginInfo: return "RenderPassBeginInfo"; + case StructureType::eBufferMemoryBarrier: return "BufferMemoryBarrier"; + case StructureType::eImageMemoryBarrier: return "ImageMemoryBarrier"; + case StructureType::eMemoryBarrier: return "MemoryBarrier"; + case StructureType::eLoaderInstanceCreateInfo: return "LoaderInstanceCreateInfo"; + case StructureType::eLoaderDeviceCreateInfo: return "LoaderDeviceCreateInfo"; + case StructureType::ePhysicalDeviceSubgroupProperties: return "PhysicalDeviceSubgroupProperties"; + case StructureType::eBindBufferMemoryInfo: return "BindBufferMemoryInfo"; + case StructureType::eBindImageMemoryInfo: return "BindImageMemoryInfo"; + case StructureType::ePhysicalDevice16BitStorageFeatures: return "PhysicalDevice16BitStorageFeatures"; + case StructureType::eMemoryDedicatedRequirements: return "MemoryDedicatedRequirements"; + case StructureType::eMemoryDedicatedAllocateInfo: return "MemoryDedicatedAllocateInfo"; + case StructureType::eMemoryAllocateFlagsInfo: return "MemoryAllocateFlagsInfo"; + case StructureType::eDeviceGroupRenderPassBeginInfo: return "DeviceGroupRenderPassBeginInfo"; + case StructureType::eDeviceGroupCommandBufferBeginInfo: return "DeviceGroupCommandBufferBeginInfo"; + case StructureType::eDeviceGroupSubmitInfo: return "DeviceGroupSubmitInfo"; + case StructureType::eDeviceGroupBindSparseInfo: return "DeviceGroupBindSparseInfo"; + case StructureType::eBindBufferMemoryDeviceGroupInfo: return "BindBufferMemoryDeviceGroupInfo"; + case StructureType::eBindImageMemoryDeviceGroupInfo: return "BindImageMemoryDeviceGroupInfo"; + case StructureType::ePhysicalDeviceGroupProperties: return "PhysicalDeviceGroupProperties"; + case StructureType::eDeviceGroupDeviceCreateInfo: return "DeviceGroupDeviceCreateInfo"; + case StructureType::eBufferMemoryRequirementsInfo2: return "BufferMemoryRequirementsInfo2"; + case StructureType::eImageMemoryRequirementsInfo2: return "ImageMemoryRequirementsInfo2"; 
+ case StructureType::eImageSparseMemoryRequirementsInfo2: return "ImageSparseMemoryRequirementsInfo2"; + case StructureType::eMemoryRequirements2: return "MemoryRequirements2"; + case StructureType::eSparseImageMemoryRequirements2: return "SparseImageMemoryRequirements2"; + case StructureType::ePhysicalDeviceFeatures2: return "PhysicalDeviceFeatures2"; + case StructureType::ePhysicalDeviceProperties2: return "PhysicalDeviceProperties2"; + case StructureType::eFormatProperties2: return "FormatProperties2"; + case StructureType::eImageFormatProperties2: return "ImageFormatProperties2"; + case StructureType::ePhysicalDeviceImageFormatInfo2: return "PhysicalDeviceImageFormatInfo2"; + case StructureType::eQueueFamilyProperties2: return "QueueFamilyProperties2"; + case StructureType::ePhysicalDeviceMemoryProperties2: return "PhysicalDeviceMemoryProperties2"; + case StructureType::eSparseImageFormatProperties2: return "SparseImageFormatProperties2"; + case StructureType::ePhysicalDeviceSparseImageFormatInfo2: return "PhysicalDeviceSparseImageFormatInfo2"; + case StructureType::ePhysicalDevicePointClippingProperties: return "PhysicalDevicePointClippingProperties"; + case StructureType::eRenderPassInputAttachmentAspectCreateInfo: return "RenderPassInputAttachmentAspectCreateInfo"; + case StructureType::eImageViewUsageCreateInfo: return "ImageViewUsageCreateInfo"; + case StructureType::ePipelineTessellationDomainOriginStateCreateInfo: return "PipelineTessellationDomainOriginStateCreateInfo"; + case StructureType::eRenderPassMultiviewCreateInfo: return "RenderPassMultiviewCreateInfo"; + case StructureType::ePhysicalDeviceMultiviewFeatures: return "PhysicalDeviceMultiviewFeatures"; + case StructureType::ePhysicalDeviceMultiviewProperties: return "PhysicalDeviceMultiviewProperties"; + case StructureType::ePhysicalDeviceVariablePointerFeatures: return "PhysicalDeviceVariablePointerFeatures"; + case StructureType::eProtectedSubmitInfo: return "ProtectedSubmitInfo"; + case StructureType::ePhysicalDeviceProtectedMemoryFeatures: return "PhysicalDeviceProtectedMemoryFeatures"; + case StructureType::ePhysicalDeviceProtectedMemoryProperties: return "PhysicalDeviceProtectedMemoryProperties"; + case StructureType::eDeviceQueueInfo2: return "DeviceQueueInfo2"; + case StructureType::eSamplerYcbcrConversionCreateInfo: return "SamplerYcbcrConversionCreateInfo"; + case StructureType::eSamplerYcbcrConversionInfo: return "SamplerYcbcrConversionInfo"; + case StructureType::eBindImagePlaneMemoryInfo: return "BindImagePlaneMemoryInfo"; + case StructureType::eImagePlaneMemoryRequirementsInfo: return "ImagePlaneMemoryRequirementsInfo"; + case StructureType::ePhysicalDeviceSamplerYcbcrConversionFeatures: return "PhysicalDeviceSamplerYcbcrConversionFeatures"; + case StructureType::eSamplerYcbcrConversionImageFormatProperties: return "SamplerYcbcrConversionImageFormatProperties"; + case StructureType::eDescriptorUpdateTemplateCreateInfo: return "DescriptorUpdateTemplateCreateInfo"; + case StructureType::ePhysicalDeviceExternalImageFormatInfo: return "PhysicalDeviceExternalImageFormatInfo"; + case StructureType::eExternalImageFormatProperties: return "ExternalImageFormatProperties"; + case StructureType::ePhysicalDeviceExternalBufferInfo: return "PhysicalDeviceExternalBufferInfo"; + case StructureType::eExternalBufferProperties: return "ExternalBufferProperties"; + case StructureType::ePhysicalDeviceIdProperties: return "PhysicalDeviceIdProperties"; + case StructureType::eExternalMemoryBufferCreateInfo: return 
"ExternalMemoryBufferCreateInfo"; + case StructureType::eExternalMemoryImageCreateInfo: return "ExternalMemoryImageCreateInfo"; + case StructureType::eExportMemoryAllocateInfo: return "ExportMemoryAllocateInfo"; + case StructureType::ePhysicalDeviceExternalFenceInfo: return "PhysicalDeviceExternalFenceInfo"; + case StructureType::eExternalFenceProperties: return "ExternalFenceProperties"; + case StructureType::eExportFenceCreateInfo: return "ExportFenceCreateInfo"; + case StructureType::eExportSemaphoreCreateInfo: return "ExportSemaphoreCreateInfo"; + case StructureType::ePhysicalDeviceExternalSemaphoreInfo: return "PhysicalDeviceExternalSemaphoreInfo"; + case StructureType::eExternalSemaphoreProperties: return "ExternalSemaphoreProperties"; + case StructureType::ePhysicalDeviceMaintenance3Properties: return "PhysicalDeviceMaintenance3Properties"; + case StructureType::eDescriptorSetLayoutSupport: return "DescriptorSetLayoutSupport"; + case StructureType::ePhysicalDeviceShaderDrawParameterFeatures: return "PhysicalDeviceShaderDrawParameterFeatures"; + case StructureType::eSwapchainCreateInfoKHR: return "SwapchainCreateInfoKHR"; + case StructureType::ePresentInfoKHR: return "PresentInfoKHR"; + case StructureType::eDeviceGroupPresentCapabilitiesKHR: return "DeviceGroupPresentCapabilitiesKHR"; + case StructureType::eImageSwapchainCreateInfoKHR: return "ImageSwapchainCreateInfoKHR"; + case StructureType::eBindImageMemorySwapchainInfoKHR: return "BindImageMemorySwapchainInfoKHR"; + case StructureType::eAcquireNextImageInfoKHR: return "AcquireNextImageInfoKHR"; + case StructureType::eDeviceGroupPresentInfoKHR: return "DeviceGroupPresentInfoKHR"; + case StructureType::eDeviceGroupSwapchainCreateInfoKHR: return "DeviceGroupSwapchainCreateInfoKHR"; + case StructureType::eDisplayModeCreateInfoKHR: return "DisplayModeCreateInfoKHR"; + case StructureType::eDisplaySurfaceCreateInfoKHR: return "DisplaySurfaceCreateInfoKHR"; + case StructureType::eDisplayPresentInfoKHR: return "DisplayPresentInfoKHR"; + case StructureType::eXlibSurfaceCreateInfoKHR: return "XlibSurfaceCreateInfoKHR"; + case StructureType::eXcbSurfaceCreateInfoKHR: return "XcbSurfaceCreateInfoKHR"; + case StructureType::eWaylandSurfaceCreateInfoKHR: return "WaylandSurfaceCreateInfoKHR"; + case StructureType::eAndroidSurfaceCreateInfoKHR: return "AndroidSurfaceCreateInfoKHR"; + case StructureType::eWin32SurfaceCreateInfoKHR: return "Win32SurfaceCreateInfoKHR"; + case StructureType::eDebugReportCallbackCreateInfoEXT: return "DebugReportCallbackCreateInfoEXT"; + case StructureType::ePipelineRasterizationStateRasterizationOrderAMD: return "PipelineRasterizationStateRasterizationOrderAMD"; + case StructureType::eDebugMarkerObjectNameInfoEXT: return "DebugMarkerObjectNameInfoEXT"; + case StructureType::eDebugMarkerObjectTagInfoEXT: return "DebugMarkerObjectTagInfoEXT"; + case StructureType::eDebugMarkerMarkerInfoEXT: return "DebugMarkerMarkerInfoEXT"; + case StructureType::eDedicatedAllocationImageCreateInfoNV: return "DedicatedAllocationImageCreateInfoNV"; + case StructureType::eDedicatedAllocationBufferCreateInfoNV: return "DedicatedAllocationBufferCreateInfoNV"; + case StructureType::eDedicatedAllocationMemoryAllocateInfoNV: return "DedicatedAllocationMemoryAllocateInfoNV"; + case StructureType::ePhysicalDeviceTransformFeedbackFeaturesEXT: return "PhysicalDeviceTransformFeedbackFeaturesEXT"; + case StructureType::ePhysicalDeviceTransformFeedbackPropertiesEXT: return "PhysicalDeviceTransformFeedbackPropertiesEXT"; + case 
StructureType::ePipelineRasterizationStateStreamCreateInfoEXT: return "PipelineRasterizationStateStreamCreateInfoEXT"; + case StructureType::eTextureLodGatherFormatPropertiesAMD: return "TextureLodGatherFormatPropertiesAMD"; + case StructureType::ePhysicalDeviceCornerSampledImageFeaturesNV: return "PhysicalDeviceCornerSampledImageFeaturesNV"; + case StructureType::eExternalMemoryImageCreateInfoNV: return "ExternalMemoryImageCreateInfoNV"; + case StructureType::eExportMemoryAllocateInfoNV: return "ExportMemoryAllocateInfoNV"; + case StructureType::eImportMemoryWin32HandleInfoNV: return "ImportMemoryWin32HandleInfoNV"; + case StructureType::eExportMemoryWin32HandleInfoNV: return "ExportMemoryWin32HandleInfoNV"; + case StructureType::eWin32KeyedMutexAcquireReleaseInfoNV: return "Win32KeyedMutexAcquireReleaseInfoNV"; + case StructureType::eValidationFlagsEXT: return "ValidationFlagsEXT"; + case StructureType::eViSurfaceCreateInfoNN: return "ViSurfaceCreateInfoNN"; + case StructureType::eImageViewAstcDecodeModeEXT: return "ImageViewAstcDecodeModeEXT"; + case StructureType::ePhysicalDeviceAstcDecodeFeaturesEXT: return "PhysicalDeviceAstcDecodeFeaturesEXT"; + case StructureType::eImportMemoryWin32HandleInfoKHR: return "ImportMemoryWin32HandleInfoKHR"; + case StructureType::eExportMemoryWin32HandleInfoKHR: return "ExportMemoryWin32HandleInfoKHR"; + case StructureType::eMemoryWin32HandlePropertiesKHR: return "MemoryWin32HandlePropertiesKHR"; + case StructureType::eMemoryGetWin32HandleInfoKHR: return "MemoryGetWin32HandleInfoKHR"; + case StructureType::eImportMemoryFdInfoKHR: return "ImportMemoryFdInfoKHR"; + case StructureType::eMemoryFdPropertiesKHR: return "MemoryFdPropertiesKHR"; + case StructureType::eMemoryGetFdInfoKHR: return "MemoryGetFdInfoKHR"; + case StructureType::eWin32KeyedMutexAcquireReleaseInfoKHR: return "Win32KeyedMutexAcquireReleaseInfoKHR"; + case StructureType::eImportSemaphoreWin32HandleInfoKHR: return "ImportSemaphoreWin32HandleInfoKHR"; + case StructureType::eExportSemaphoreWin32HandleInfoKHR: return "ExportSemaphoreWin32HandleInfoKHR"; + case StructureType::eD3D12FenceSubmitInfoKHR: return "D3D12FenceSubmitInfoKHR"; + case StructureType::eSemaphoreGetWin32HandleInfoKHR: return "SemaphoreGetWin32HandleInfoKHR"; + case StructureType::eImportSemaphoreFdInfoKHR: return "ImportSemaphoreFdInfoKHR"; + case StructureType::eSemaphoreGetFdInfoKHR: return "SemaphoreGetFdInfoKHR"; + case StructureType::ePhysicalDevicePushDescriptorPropertiesKHR: return "PhysicalDevicePushDescriptorPropertiesKHR"; + case StructureType::eCommandBufferInheritanceConditionalRenderingInfoEXT: return "CommandBufferInheritanceConditionalRenderingInfoEXT"; + case StructureType::ePhysicalDeviceConditionalRenderingFeaturesEXT: return "PhysicalDeviceConditionalRenderingFeaturesEXT"; + case StructureType::eConditionalRenderingBeginInfoEXT: return "ConditionalRenderingBeginInfoEXT"; + case StructureType::ePresentRegionsKHR: return "PresentRegionsKHR"; + case StructureType::eObjectTableCreateInfoNVX: return "ObjectTableCreateInfoNVX"; + case StructureType::eIndirectCommandsLayoutCreateInfoNVX: return "IndirectCommandsLayoutCreateInfoNVX"; + case StructureType::eCmdProcessCommandsInfoNVX: return "CmdProcessCommandsInfoNVX"; + case StructureType::eCmdReserveSpaceForCommandsInfoNVX: return "CmdReserveSpaceForCommandsInfoNVX"; + case StructureType::eDeviceGeneratedCommandsLimitsNVX: return "DeviceGeneratedCommandsLimitsNVX"; + case StructureType::eDeviceGeneratedCommandsFeaturesNVX: return 
"DeviceGeneratedCommandsFeaturesNVX"; + case StructureType::ePipelineViewportWScalingStateCreateInfoNV: return "PipelineViewportWScalingStateCreateInfoNV"; + case StructureType::eSurfaceCapabilities2EXT: return "SurfaceCapabilities2EXT"; + case StructureType::eDisplayPowerInfoEXT: return "DisplayPowerInfoEXT"; + case StructureType::eDeviceEventInfoEXT: return "DeviceEventInfoEXT"; + case StructureType::eDisplayEventInfoEXT: return "DisplayEventInfoEXT"; + case StructureType::eSwapchainCounterCreateInfoEXT: return "SwapchainCounterCreateInfoEXT"; + case StructureType::ePresentTimesInfoGOOGLE: return "PresentTimesInfoGOOGLE"; + case StructureType::ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX: return "PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX"; + case StructureType::ePipelineViewportSwizzleStateCreateInfoNV: return "PipelineViewportSwizzleStateCreateInfoNV"; + case StructureType::ePhysicalDeviceDiscardRectanglePropertiesEXT: return "PhysicalDeviceDiscardRectanglePropertiesEXT"; + case StructureType::ePipelineDiscardRectangleStateCreateInfoEXT: return "PipelineDiscardRectangleStateCreateInfoEXT"; + case StructureType::ePhysicalDeviceConservativeRasterizationPropertiesEXT: return "PhysicalDeviceConservativeRasterizationPropertiesEXT"; + case StructureType::ePipelineRasterizationConservativeStateCreateInfoEXT: return "PipelineRasterizationConservativeStateCreateInfoEXT"; + case StructureType::eHdrMetadataEXT: return "HdrMetadataEXT"; + case StructureType::eAttachmentDescription2KHR: return "AttachmentDescription2KHR"; + case StructureType::eAttachmentReference2KHR: return "AttachmentReference2KHR"; + case StructureType::eSubpassDescription2KHR: return "SubpassDescription2KHR"; + case StructureType::eSubpassDependency2KHR: return "SubpassDependency2KHR"; + case StructureType::eRenderPassCreateInfo2KHR: return "RenderPassCreateInfo2KHR"; + case StructureType::eSubpassBeginInfoKHR: return "SubpassBeginInfoKHR"; + case StructureType::eSubpassEndInfoKHR: return "SubpassEndInfoKHR"; + case StructureType::eSharedPresentSurfaceCapabilitiesKHR: return "SharedPresentSurfaceCapabilitiesKHR"; + case StructureType::eImportFenceWin32HandleInfoKHR: return "ImportFenceWin32HandleInfoKHR"; + case StructureType::eExportFenceWin32HandleInfoKHR: return "ExportFenceWin32HandleInfoKHR"; + case StructureType::eFenceGetWin32HandleInfoKHR: return "FenceGetWin32HandleInfoKHR"; + case StructureType::eImportFenceFdInfoKHR: return "ImportFenceFdInfoKHR"; + case StructureType::eFenceGetFdInfoKHR: return "FenceGetFdInfoKHR"; + case StructureType::ePhysicalDeviceSurfaceInfo2KHR: return "PhysicalDeviceSurfaceInfo2KHR"; + case StructureType::eSurfaceCapabilities2KHR: return "SurfaceCapabilities2KHR"; + case StructureType::eSurfaceFormat2KHR: return "SurfaceFormat2KHR"; + case StructureType::eDisplayProperties2KHR: return "DisplayProperties2KHR"; + case StructureType::eDisplayPlaneProperties2KHR: return "DisplayPlaneProperties2KHR"; + case StructureType::eDisplayModeProperties2KHR: return "DisplayModeProperties2KHR"; + case StructureType::eDisplayPlaneInfo2KHR: return "DisplayPlaneInfo2KHR"; + case StructureType::eDisplayPlaneCapabilities2KHR: return "DisplayPlaneCapabilities2KHR"; + case StructureType::eIosSurfaceCreateInfoMVK: return "IosSurfaceCreateInfoMVK"; + case StructureType::eMacosSurfaceCreateInfoMVK: return "MacosSurfaceCreateInfoMVK"; + case StructureType::eDebugUtilsObjectNameInfoEXT: return "DebugUtilsObjectNameInfoEXT"; + case StructureType::eDebugUtilsObjectTagInfoEXT: return 
"DebugUtilsObjectTagInfoEXT"; + case StructureType::eDebugUtilsLabelEXT: return "DebugUtilsLabelEXT"; + case StructureType::eDebugUtilsMessengerCallbackDataEXT: return "DebugUtilsMessengerCallbackDataEXT"; + case StructureType::eDebugUtilsMessengerCreateInfoEXT: return "DebugUtilsMessengerCreateInfoEXT"; + case StructureType::eAndroidHardwareBufferUsageANDROID: return "AndroidHardwareBufferUsageANDROID"; + case StructureType::eAndroidHardwareBufferPropertiesANDROID: return "AndroidHardwareBufferPropertiesANDROID"; + case StructureType::eAndroidHardwareBufferFormatPropertiesANDROID: return "AndroidHardwareBufferFormatPropertiesANDROID"; + case StructureType::eImportAndroidHardwareBufferInfoANDROID: return "ImportAndroidHardwareBufferInfoANDROID"; + case StructureType::eMemoryGetAndroidHardwareBufferInfoANDROID: return "MemoryGetAndroidHardwareBufferInfoANDROID"; + case StructureType::eExternalFormatANDROID: return "ExternalFormatANDROID"; + case StructureType::ePhysicalDeviceSamplerFilterMinmaxPropertiesEXT: return "PhysicalDeviceSamplerFilterMinmaxPropertiesEXT"; + case StructureType::eSamplerReductionModeCreateInfoEXT: return "SamplerReductionModeCreateInfoEXT"; + case StructureType::ePhysicalDeviceInlineUniformBlockFeaturesEXT: return "PhysicalDeviceInlineUniformBlockFeaturesEXT"; + case StructureType::ePhysicalDeviceInlineUniformBlockPropertiesEXT: return "PhysicalDeviceInlineUniformBlockPropertiesEXT"; + case StructureType::eWriteDescriptorSetInlineUniformBlockEXT: return "WriteDescriptorSetInlineUniformBlockEXT"; + case StructureType::eDescriptorPoolInlineUniformBlockCreateInfoEXT: return "DescriptorPoolInlineUniformBlockCreateInfoEXT"; + case StructureType::eSampleLocationsInfoEXT: return "SampleLocationsInfoEXT"; + case StructureType::eRenderPassSampleLocationsBeginInfoEXT: return "RenderPassSampleLocationsBeginInfoEXT"; + case StructureType::ePipelineSampleLocationsStateCreateInfoEXT: return "PipelineSampleLocationsStateCreateInfoEXT"; + case StructureType::ePhysicalDeviceSampleLocationsPropertiesEXT: return "PhysicalDeviceSampleLocationsPropertiesEXT"; + case StructureType::eMultisamplePropertiesEXT: return "MultisamplePropertiesEXT"; + case StructureType::eImageFormatListCreateInfoKHR: return "ImageFormatListCreateInfoKHR"; + case StructureType::ePhysicalDeviceBlendOperationAdvancedFeaturesEXT: return "PhysicalDeviceBlendOperationAdvancedFeaturesEXT"; + case StructureType::ePhysicalDeviceBlendOperationAdvancedPropertiesEXT: return "PhysicalDeviceBlendOperationAdvancedPropertiesEXT"; + case StructureType::ePipelineColorBlendAdvancedStateCreateInfoEXT: return "PipelineColorBlendAdvancedStateCreateInfoEXT"; + case StructureType::ePipelineCoverageToColorStateCreateInfoNV: return "PipelineCoverageToColorStateCreateInfoNV"; + case StructureType::ePipelineCoverageModulationStateCreateInfoNV: return "PipelineCoverageModulationStateCreateInfoNV"; + case StructureType::eDrmFormatModifierPropertiesListEXT: return "DrmFormatModifierPropertiesListEXT"; + case StructureType::eDrmFormatModifierPropertiesEXT: return "DrmFormatModifierPropertiesEXT"; + case StructureType::ePhysicalDeviceImageDrmFormatModifierInfoEXT: return "PhysicalDeviceImageDrmFormatModifierInfoEXT"; + case StructureType::eImageDrmFormatModifierListCreateInfoEXT: return "ImageDrmFormatModifierListCreateInfoEXT"; + case StructureType::eImageDrmFormatModifierExplicitCreateInfoEXT: return "ImageDrmFormatModifierExplicitCreateInfoEXT"; + case StructureType::eImageDrmFormatModifierPropertiesEXT: return 
"ImageDrmFormatModifierPropertiesEXT"; + case StructureType::eValidationCacheCreateInfoEXT: return "ValidationCacheCreateInfoEXT"; + case StructureType::eShaderModuleValidationCacheCreateInfoEXT: return "ShaderModuleValidationCacheCreateInfoEXT"; + case StructureType::eDescriptorSetLayoutBindingFlagsCreateInfoEXT: return "DescriptorSetLayoutBindingFlagsCreateInfoEXT"; + case StructureType::ePhysicalDeviceDescriptorIndexingFeaturesEXT: return "PhysicalDeviceDescriptorIndexingFeaturesEXT"; + case StructureType::ePhysicalDeviceDescriptorIndexingPropertiesEXT: return "PhysicalDeviceDescriptorIndexingPropertiesEXT"; + case StructureType::eDescriptorSetVariableDescriptorCountAllocateInfoEXT: return "DescriptorSetVariableDescriptorCountAllocateInfoEXT"; + case StructureType::eDescriptorSetVariableDescriptorCountLayoutSupportEXT: return "DescriptorSetVariableDescriptorCountLayoutSupportEXT"; + case StructureType::ePipelineViewportShadingRateImageStateCreateInfoNV: return "PipelineViewportShadingRateImageStateCreateInfoNV"; + case StructureType::ePhysicalDeviceShadingRateImageFeaturesNV: return "PhysicalDeviceShadingRateImageFeaturesNV"; + case StructureType::ePhysicalDeviceShadingRateImagePropertiesNV: return "PhysicalDeviceShadingRateImagePropertiesNV"; + case StructureType::ePipelineViewportCoarseSampleOrderStateCreateInfoNV: return "PipelineViewportCoarseSampleOrderStateCreateInfoNV"; + case StructureType::eRayTracingPipelineCreateInfoNV: return "RayTracingPipelineCreateInfoNV"; + case StructureType::eAccelerationStructureCreateInfoNV: return "AccelerationStructureCreateInfoNV"; + case StructureType::eGeometryNV: return "GeometryNV"; + case StructureType::eGeometryTrianglesNV: return "GeometryTrianglesNV"; + case StructureType::eGeometryAabbNV: return "GeometryAabbNV"; + case StructureType::eBindAccelerationStructureMemoryInfoNV: return "BindAccelerationStructureMemoryInfoNV"; + case StructureType::eWriteDescriptorSetAccelerationStructureNV: return "WriteDescriptorSetAccelerationStructureNV"; + case StructureType::eAccelerationStructureMemoryRequirementsInfoNV: return "AccelerationStructureMemoryRequirementsInfoNV"; + case StructureType::ePhysicalDeviceRayTracingPropertiesNV: return "PhysicalDeviceRayTracingPropertiesNV"; + case StructureType::eRayTracingShaderGroupCreateInfoNV: return "RayTracingShaderGroupCreateInfoNV"; + case StructureType::eAccelerationStructureInfoNV: return "AccelerationStructureInfoNV"; + case StructureType::ePhysicalDeviceRepresentativeFragmentTestFeaturesNV: return "PhysicalDeviceRepresentativeFragmentTestFeaturesNV"; + case StructureType::ePipelineRepresentativeFragmentTestStateCreateInfoNV: return "PipelineRepresentativeFragmentTestStateCreateInfoNV"; + case StructureType::eDeviceQueueGlobalPriorityCreateInfoEXT: return "DeviceQueueGlobalPriorityCreateInfoEXT"; + case StructureType::ePhysicalDevice8BitStorageFeaturesKHR: return "PhysicalDevice8BitStorageFeaturesKHR"; + case StructureType::eImportMemoryHostPointerInfoEXT: return "ImportMemoryHostPointerInfoEXT"; + case StructureType::eMemoryHostPointerPropertiesEXT: return "MemoryHostPointerPropertiesEXT"; + case StructureType::ePhysicalDeviceExternalMemoryHostPropertiesEXT: return "PhysicalDeviceExternalMemoryHostPropertiesEXT"; + case StructureType::ePhysicalDeviceShaderAtomicInt64FeaturesKHR: return "PhysicalDeviceShaderAtomicInt64FeaturesKHR"; + case StructureType::eCalibratedTimestampInfoEXT: return "CalibratedTimestampInfoEXT"; + case StructureType::ePhysicalDeviceShaderCorePropertiesAMD: return 
"PhysicalDeviceShaderCorePropertiesAMD"; + case StructureType::eDeviceMemoryOverallocationCreateInfoAMD: return "DeviceMemoryOverallocationCreateInfoAMD"; + case StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesEXT: return "PhysicalDeviceVertexAttributeDivisorPropertiesEXT"; + case StructureType::ePipelineVertexInputDivisorStateCreateInfoEXT: return "PipelineVertexInputDivisorStateCreateInfoEXT"; + case StructureType::ePhysicalDeviceVertexAttributeDivisorFeaturesEXT: return "PhysicalDeviceVertexAttributeDivisorFeaturesEXT"; + case StructureType::ePhysicalDeviceDriverPropertiesKHR: return "PhysicalDeviceDriverPropertiesKHR"; + case StructureType::ePhysicalDeviceComputeShaderDerivativesFeaturesNV: return "PhysicalDeviceComputeShaderDerivativesFeaturesNV"; + case StructureType::ePhysicalDeviceMeshShaderFeaturesNV: return "PhysicalDeviceMeshShaderFeaturesNV"; + case StructureType::ePhysicalDeviceMeshShaderPropertiesNV: return "PhysicalDeviceMeshShaderPropertiesNV"; + case StructureType::ePhysicalDeviceFragmentShaderBarycentricFeaturesNV: return "PhysicalDeviceFragmentShaderBarycentricFeaturesNV"; + case StructureType::ePhysicalDeviceShaderImageFootprintFeaturesNV: return "PhysicalDeviceShaderImageFootprintFeaturesNV"; + case StructureType::ePipelineViewportExclusiveScissorStateCreateInfoNV: return "PipelineViewportExclusiveScissorStateCreateInfoNV"; + case StructureType::ePhysicalDeviceExclusiveScissorFeaturesNV: return "PhysicalDeviceExclusiveScissorFeaturesNV"; + case StructureType::eCheckpointDataNV: return "CheckpointDataNV"; + case StructureType::eQueueFamilyCheckpointPropertiesNV: return "QueueFamilyCheckpointPropertiesNV"; + case StructureType::ePhysicalDeviceVulkanMemoryModelFeaturesKHR: return "PhysicalDeviceVulkanMemoryModelFeaturesKHR"; + case StructureType::ePhysicalDevicePciBusInfoPropertiesEXT: return "PhysicalDevicePciBusInfoPropertiesEXT"; + case StructureType::eImagepipeSurfaceCreateInfoFUCHSIA: return "ImagepipeSurfaceCreateInfoFUCHSIA"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(SubpassContents value) + { + switch (value) + { + case SubpassContents::eInline: return "Inline"; + case SubpassContents::eSecondaryCommandBuffers: return "SecondaryCommandBuffers"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(DynamicState value) + { + switch (value) + { + case DynamicState::eViewport: return "Viewport"; + case DynamicState::eScissor: return "Scissor"; + case DynamicState::eLineWidth: return "LineWidth"; + case DynamicState::eDepthBias: return "DepthBias"; + case DynamicState::eBlendConstants: return "BlendConstants"; + case DynamicState::eDepthBounds: return "DepthBounds"; + case DynamicState::eStencilCompareMask: return "StencilCompareMask"; + case DynamicState::eStencilWriteMask: return "StencilWriteMask"; + case DynamicState::eStencilReference: return "StencilReference"; + case DynamicState::eViewportWScalingNV: return "ViewportWScalingNV"; + case DynamicState::eDiscardRectangleEXT: return "DiscardRectangleEXT"; + case DynamicState::eSampleLocationsEXT: return "SampleLocationsEXT"; + case DynamicState::eViewportShadingRatePaletteNV: return "ViewportShadingRatePaletteNV"; + case DynamicState::eViewportCoarseSampleOrderNV: return "ViewportCoarseSampleOrderNV"; + case DynamicState::eExclusiveScissorNV: return "ExclusiveScissorNV"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(DescriptorUpdateTemplateType value) + { + switch (value) + { + case 
DescriptorUpdateTemplateType::eDescriptorSet: return "DescriptorSet"; + case DescriptorUpdateTemplateType::ePushDescriptorsKHR: return "PushDescriptorsKHR"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(ObjectType value) + { + switch (value) + { + case ObjectType::eUnknown: return "Unknown"; + case ObjectType::eInstance: return "Instance"; + case ObjectType::ePhysicalDevice: return "PhysicalDevice"; + case ObjectType::eDevice: return "Device"; + case ObjectType::eQueue: return "Queue"; + case ObjectType::eSemaphore: return "Semaphore"; + case ObjectType::eCommandBuffer: return "CommandBuffer"; + case ObjectType::eFence: return "Fence"; + case ObjectType::eDeviceMemory: return "DeviceMemory"; + case ObjectType::eBuffer: return "Buffer"; + case ObjectType::eImage: return "Image"; + case ObjectType::eEvent: return "Event"; + case ObjectType::eQueryPool: return "QueryPool"; + case ObjectType::eBufferView: return "BufferView"; + case ObjectType::eImageView: return "ImageView"; + case ObjectType::eShaderModule: return "ShaderModule"; + case ObjectType::ePipelineCache: return "PipelineCache"; + case ObjectType::ePipelineLayout: return "PipelineLayout"; + case ObjectType::eRenderPass: return "RenderPass"; + case ObjectType::ePipeline: return "Pipeline"; + case ObjectType::eDescriptorSetLayout: return "DescriptorSetLayout"; + case ObjectType::eSampler: return "Sampler"; + case ObjectType::eDescriptorPool: return "DescriptorPool"; + case ObjectType::eDescriptorSet: return "DescriptorSet"; + case ObjectType::eFramebuffer: return "Framebuffer"; + case ObjectType::eCommandPool: return "CommandPool"; + case ObjectType::eSamplerYcbcrConversion: return "SamplerYcbcrConversion"; + case ObjectType::eDescriptorUpdateTemplate: return "DescriptorUpdateTemplate"; + case ObjectType::eSurfaceKHR: return "SurfaceKHR"; + case ObjectType::eSwapchainKHR: return "SwapchainKHR"; + case ObjectType::eDisplayKHR: return "DisplayKHR"; + case ObjectType::eDisplayModeKHR: return "DisplayModeKHR"; + case ObjectType::eDebugReportCallbackEXT: return "DebugReportCallbackEXT"; + case ObjectType::eObjectTableNVX: return "ObjectTableNVX"; + case ObjectType::eIndirectCommandsLayoutNVX: return "IndirectCommandsLayoutNVX"; + case ObjectType::eDebugUtilsMessengerEXT: return "DebugUtilsMessengerEXT"; + case ObjectType::eValidationCacheEXT: return "ValidationCacheEXT"; + case ObjectType::eAccelerationStructureNV: return "AccelerationStructureNV"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(QueueFlagBits value) + { + switch (value) + { + case QueueFlagBits::eGraphics: return "Graphics"; + case QueueFlagBits::eCompute: return "Compute"; + case QueueFlagBits::eTransfer: return "Transfer"; + case QueueFlagBits::eSparseBinding: return "SparseBinding"; + case QueueFlagBits::eProtected: return "Protected"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(QueueFlags value) + { + if (!value) return "{}"; + std::string result; + if (value & QueueFlagBits::eGraphics) result += "Graphics | "; + if (value & QueueFlagBits::eCompute) result += "Compute | "; + if (value & QueueFlagBits::eTransfer) result += "Transfer | "; + if (value & QueueFlagBits::eSparseBinding) result += "SparseBinding | "; + if (value & QueueFlagBits::eProtected) result += "Protected | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(DeviceQueueCreateFlagBits value) + { + switch (value) + { + case 
DeviceQueueCreateFlagBits::eProtected: return "Protected"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(DeviceQueueCreateFlags value) + { + if (!value) return "{}"; + std::string result; + if (value & DeviceQueueCreateFlagBits::eProtected) result += "Protected | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(MemoryPropertyFlagBits value) + { + switch (value) + { + case MemoryPropertyFlagBits::eDeviceLocal: return "DeviceLocal"; + case MemoryPropertyFlagBits::eHostVisible: return "HostVisible"; + case MemoryPropertyFlagBits::eHostCoherent: return "HostCoherent"; + case MemoryPropertyFlagBits::eHostCached: return "HostCached"; + case MemoryPropertyFlagBits::eLazilyAllocated: return "LazilyAllocated"; + case MemoryPropertyFlagBits::eProtected: return "Protected"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(MemoryPropertyFlags value) + { + if (!value) return "{}"; + std::string result; + if (value & MemoryPropertyFlagBits::eDeviceLocal) result += "DeviceLocal | "; + if (value & MemoryPropertyFlagBits::eHostVisible) result += "HostVisible | "; + if (value & MemoryPropertyFlagBits::eHostCoherent) result += "HostCoherent | "; + if (value & MemoryPropertyFlagBits::eHostCached) result += "HostCached | "; + if (value & MemoryPropertyFlagBits::eLazilyAllocated) result += "LazilyAllocated | "; + if (value & MemoryPropertyFlagBits::eProtected) result += "Protected | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(MemoryHeapFlagBits value) + { + switch (value) + { + case MemoryHeapFlagBits::eDeviceLocal: return "DeviceLocal"; + case MemoryHeapFlagBits::eMultiInstance: return "MultiInstance"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(MemoryHeapFlags value) + { + if (!value) return "{}"; + std::string result; + if (value & MemoryHeapFlagBits::eDeviceLocal) result += "DeviceLocal | "; + if (value & MemoryHeapFlagBits::eMultiInstance) result += "MultiInstance | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(AccessFlagBits value) + { + switch (value) + { + case AccessFlagBits::eIndirectCommandRead: return "IndirectCommandRead"; + case AccessFlagBits::eIndexRead: return "IndexRead"; + case AccessFlagBits::eVertexAttributeRead: return "VertexAttributeRead"; + case AccessFlagBits::eUniformRead: return "UniformRead"; + case AccessFlagBits::eInputAttachmentRead: return "InputAttachmentRead"; + case AccessFlagBits::eShaderRead: return "ShaderRead"; + case AccessFlagBits::eShaderWrite: return "ShaderWrite"; + case AccessFlagBits::eColorAttachmentRead: return "ColorAttachmentRead"; + case AccessFlagBits::eColorAttachmentWrite: return "ColorAttachmentWrite"; + case AccessFlagBits::eDepthStencilAttachmentRead: return "DepthStencilAttachmentRead"; + case AccessFlagBits::eDepthStencilAttachmentWrite: return "DepthStencilAttachmentWrite"; + case AccessFlagBits::eTransferRead: return "TransferRead"; + case AccessFlagBits::eTransferWrite: return "TransferWrite"; + case AccessFlagBits::eHostRead: return "HostRead"; + case AccessFlagBits::eHostWrite: return "HostWrite"; + case AccessFlagBits::eMemoryRead: return "MemoryRead"; + case AccessFlagBits::eMemoryWrite: return "MemoryWrite"; + case AccessFlagBits::eTransformFeedbackWriteEXT: return "TransformFeedbackWriteEXT"; + case AccessFlagBits::eTransformFeedbackCounterReadEXT: return 
"TransformFeedbackCounterReadEXT"; + case AccessFlagBits::eTransformFeedbackCounterWriteEXT: return "TransformFeedbackCounterWriteEXT"; + case AccessFlagBits::eConditionalRenderingReadEXT: return "ConditionalRenderingReadEXT"; + case AccessFlagBits::eCommandProcessReadNVX: return "CommandProcessReadNVX"; + case AccessFlagBits::eCommandProcessWriteNVX: return "CommandProcessWriteNVX"; + case AccessFlagBits::eColorAttachmentReadNoncoherentEXT: return "ColorAttachmentReadNoncoherentEXT"; + case AccessFlagBits::eShadingRateImageReadNV: return "ShadingRateImageReadNV"; + case AccessFlagBits::eAccelerationStructureReadNV: return "AccelerationStructureReadNV"; + case AccessFlagBits::eAccelerationStructureWriteNV: return "AccelerationStructureWriteNV"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(AccessFlags value) + { + if (!value) return "{}"; + std::string result; + if (value & AccessFlagBits::eIndirectCommandRead) result += "IndirectCommandRead | "; + if (value & AccessFlagBits::eIndexRead) result += "IndexRead | "; + if (value & AccessFlagBits::eVertexAttributeRead) result += "VertexAttributeRead | "; + if (value & AccessFlagBits::eUniformRead) result += "UniformRead | "; + if (value & AccessFlagBits::eInputAttachmentRead) result += "InputAttachmentRead | "; + if (value & AccessFlagBits::eShaderRead) result += "ShaderRead | "; + if (value & AccessFlagBits::eShaderWrite) result += "ShaderWrite | "; + if (value & AccessFlagBits::eColorAttachmentRead) result += "ColorAttachmentRead | "; + if (value & AccessFlagBits::eColorAttachmentWrite) result += "ColorAttachmentWrite | "; + if (value & AccessFlagBits::eDepthStencilAttachmentRead) result += "DepthStencilAttachmentRead | "; + if (value & AccessFlagBits::eDepthStencilAttachmentWrite) result += "DepthStencilAttachmentWrite | "; + if (value & AccessFlagBits::eTransferRead) result += "TransferRead | "; + if (value & AccessFlagBits::eTransferWrite) result += "TransferWrite | "; + if (value & AccessFlagBits::eHostRead) result += "HostRead | "; + if (value & AccessFlagBits::eHostWrite) result += "HostWrite | "; + if (value & AccessFlagBits::eMemoryRead) result += "MemoryRead | "; + if (value & AccessFlagBits::eMemoryWrite) result += "MemoryWrite | "; + if (value & AccessFlagBits::eTransformFeedbackWriteEXT) result += "TransformFeedbackWriteEXT | "; + if (value & AccessFlagBits::eTransformFeedbackCounterReadEXT) result += "TransformFeedbackCounterReadEXT | "; + if (value & AccessFlagBits::eTransformFeedbackCounterWriteEXT) result += "TransformFeedbackCounterWriteEXT | "; + if (value & AccessFlagBits::eConditionalRenderingReadEXT) result += "ConditionalRenderingReadEXT | "; + if (value & AccessFlagBits::eCommandProcessReadNVX) result += "CommandProcessReadNVX | "; + if (value & AccessFlagBits::eCommandProcessWriteNVX) result += "CommandProcessWriteNVX | "; + if (value & AccessFlagBits::eColorAttachmentReadNoncoherentEXT) result += "ColorAttachmentReadNoncoherentEXT | "; + if (value & AccessFlagBits::eShadingRateImageReadNV) result += "ShadingRateImageReadNV | "; + if (value & AccessFlagBits::eAccelerationStructureReadNV) result += "AccelerationStructureReadNV | "; + if (value & AccessFlagBits::eAccelerationStructureWriteNV) result += "AccelerationStructureWriteNV | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(BufferUsageFlagBits value) + { + switch (value) + { + case BufferUsageFlagBits::eTransferSrc: return "TransferSrc"; + case 
BufferUsageFlagBits::eTransferDst: return "TransferDst"; + case BufferUsageFlagBits::eUniformTexelBuffer: return "UniformTexelBuffer"; + case BufferUsageFlagBits::eStorageTexelBuffer: return "StorageTexelBuffer"; + case BufferUsageFlagBits::eUniformBuffer: return "UniformBuffer"; + case BufferUsageFlagBits::eStorageBuffer: return "StorageBuffer"; + case BufferUsageFlagBits::eIndexBuffer: return "IndexBuffer"; + case BufferUsageFlagBits::eVertexBuffer: return "VertexBuffer"; + case BufferUsageFlagBits::eIndirectBuffer: return "IndirectBuffer"; + case BufferUsageFlagBits::eTransformFeedbackBufferEXT: return "TransformFeedbackBufferEXT"; + case BufferUsageFlagBits::eTransformFeedbackCounterBufferEXT: return "TransformFeedbackCounterBufferEXT"; + case BufferUsageFlagBits::eConditionalRenderingEXT: return "ConditionalRenderingEXT"; + case BufferUsageFlagBits::eRayTracingNV: return "RayTracingNV"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(BufferUsageFlags value) + { + if (!value) return "{}"; + std::string result; + if (value & BufferUsageFlagBits::eTransferSrc) result += "TransferSrc | "; + if (value & BufferUsageFlagBits::eTransferDst) result += "TransferDst | "; + if (value & BufferUsageFlagBits::eUniformTexelBuffer) result += "UniformTexelBuffer | "; + if (value & BufferUsageFlagBits::eStorageTexelBuffer) result += "StorageTexelBuffer | "; + if (value & BufferUsageFlagBits::eUniformBuffer) result += "UniformBuffer | "; + if (value & BufferUsageFlagBits::eStorageBuffer) result += "StorageBuffer | "; + if (value & BufferUsageFlagBits::eIndexBuffer) result += "IndexBuffer | "; + if (value & BufferUsageFlagBits::eVertexBuffer) result += "VertexBuffer | "; + if (value & BufferUsageFlagBits::eIndirectBuffer) result += "IndirectBuffer | "; + if (value & BufferUsageFlagBits::eTransformFeedbackBufferEXT) result += "TransformFeedbackBufferEXT | "; + if (value & BufferUsageFlagBits::eTransformFeedbackCounterBufferEXT) result += "TransformFeedbackCounterBufferEXT | "; + if (value & BufferUsageFlagBits::eConditionalRenderingEXT) result += "ConditionalRenderingEXT | "; + if (value & BufferUsageFlagBits::eRayTracingNV) result += "RayTracingNV | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(BufferCreateFlagBits value) + { + switch (value) + { + case BufferCreateFlagBits::eSparseBinding: return "SparseBinding"; + case BufferCreateFlagBits::eSparseResidency: return "SparseResidency"; + case BufferCreateFlagBits::eSparseAliased: return "SparseAliased"; + case BufferCreateFlagBits::eProtected: return "Protected"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(BufferCreateFlags value) + { + if (!value) return "{}"; + std::string result; + if (value & BufferCreateFlagBits::eSparseBinding) result += "SparseBinding | "; + if (value & BufferCreateFlagBits::eSparseResidency) result += "SparseResidency | "; + if (value & BufferCreateFlagBits::eSparseAliased) result += "SparseAliased | "; + if (value & BufferCreateFlagBits::eProtected) result += "Protected | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(ShaderStageFlagBits value) + { + switch (value) + { + case ShaderStageFlagBits::eVertex: return "Vertex"; + case ShaderStageFlagBits::eTessellationControl: return "TessellationControl"; + case ShaderStageFlagBits::eTessellationEvaluation: return "TessellationEvaluation"; + case ShaderStageFlagBits::eGeometry: return 
"Geometry"; + case ShaderStageFlagBits::eFragment: return "Fragment"; + case ShaderStageFlagBits::eCompute: return "Compute"; + case ShaderStageFlagBits::eAllGraphics: return "AllGraphics"; + case ShaderStageFlagBits::eAll: return "All"; + case ShaderStageFlagBits::eRaygenNV: return "RaygenNV"; + case ShaderStageFlagBits::eAnyHitNV: return "AnyHitNV"; + case ShaderStageFlagBits::eClosestHitNV: return "ClosestHitNV"; + case ShaderStageFlagBits::eMissNV: return "MissNV"; + case ShaderStageFlagBits::eIntersectionNV: return "IntersectionNV"; + case ShaderStageFlagBits::eCallableNV: return "CallableNV"; + case ShaderStageFlagBits::eTaskNV: return "TaskNV"; + case ShaderStageFlagBits::eMeshNV: return "MeshNV"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(ShaderStageFlags value) + { + if (!value) return "{}"; + std::string result; + if (value & ShaderStageFlagBits::eVertex) result += "Vertex | "; + if (value & ShaderStageFlagBits::eTessellationControl) result += "TessellationControl | "; + if (value & ShaderStageFlagBits::eTessellationEvaluation) result += "TessellationEvaluation | "; + if (value & ShaderStageFlagBits::eGeometry) result += "Geometry | "; + if (value & ShaderStageFlagBits::eFragment) result += "Fragment | "; + if (value & ShaderStageFlagBits::eCompute) result += "Compute | "; + if (value & ShaderStageFlagBits::eAllGraphics) result += "AllGraphics | "; + if (value & ShaderStageFlagBits::eAll) result += "All | "; + if (value & ShaderStageFlagBits::eRaygenNV) result += "RaygenNV | "; + if (value & ShaderStageFlagBits::eAnyHitNV) result += "AnyHitNV | "; + if (value & ShaderStageFlagBits::eClosestHitNV) result += "ClosestHitNV | "; + if (value & ShaderStageFlagBits::eMissNV) result += "MissNV | "; + if (value & ShaderStageFlagBits::eIntersectionNV) result += "IntersectionNV | "; + if (value & ShaderStageFlagBits::eCallableNV) result += "CallableNV | "; + if (value & ShaderStageFlagBits::eTaskNV) result += "TaskNV | "; + if (value & ShaderStageFlagBits::eMeshNV) result += "MeshNV | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(ImageUsageFlagBits value) + { + switch (value) + { + case ImageUsageFlagBits::eTransferSrc: return "TransferSrc"; + case ImageUsageFlagBits::eTransferDst: return "TransferDst"; + case ImageUsageFlagBits::eSampled: return "Sampled"; + case ImageUsageFlagBits::eStorage: return "Storage"; + case ImageUsageFlagBits::eColorAttachment: return "ColorAttachment"; + case ImageUsageFlagBits::eDepthStencilAttachment: return "DepthStencilAttachment"; + case ImageUsageFlagBits::eTransientAttachment: return "TransientAttachment"; + case ImageUsageFlagBits::eInputAttachment: return "InputAttachment"; + case ImageUsageFlagBits::eShadingRateImageNV: return "ShadingRateImageNV"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(ImageUsageFlags value) + { + if (!value) return "{}"; + std::string result; + if (value & ImageUsageFlagBits::eTransferSrc) result += "TransferSrc | "; + if (value & ImageUsageFlagBits::eTransferDst) result += "TransferDst | "; + if (value & ImageUsageFlagBits::eSampled) result += "Sampled | "; + if (value & ImageUsageFlagBits::eStorage) result += "Storage | "; + if (value & ImageUsageFlagBits::eColorAttachment) result += "ColorAttachment | "; + if (value & ImageUsageFlagBits::eDepthStencilAttachment) result += "DepthStencilAttachment | "; + if (value & ImageUsageFlagBits::eTransientAttachment) result += "TransientAttachment | "; 
+ if (value & ImageUsageFlagBits::eInputAttachment) result += "InputAttachment | "; + if (value & ImageUsageFlagBits::eShadingRateImageNV) result += "ShadingRateImageNV | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(ImageCreateFlagBits value) + { + switch (value) + { + case ImageCreateFlagBits::eSparseBinding: return "SparseBinding"; + case ImageCreateFlagBits::eSparseResidency: return "SparseResidency"; + case ImageCreateFlagBits::eSparseAliased: return "SparseAliased"; + case ImageCreateFlagBits::eMutableFormat: return "MutableFormat"; + case ImageCreateFlagBits::eCubeCompatible: return "CubeCompatible"; + case ImageCreateFlagBits::eAlias: return "Alias"; + case ImageCreateFlagBits::eSplitInstanceBindRegions: return "SplitInstanceBindRegions"; + case ImageCreateFlagBits::e2DArrayCompatible: return "2DArrayCompatible"; + case ImageCreateFlagBits::eBlockTexelViewCompatible: return "BlockTexelViewCompatible"; + case ImageCreateFlagBits::eExtendedUsage: return "ExtendedUsage"; + case ImageCreateFlagBits::eProtected: return "Protected"; + case ImageCreateFlagBits::eDisjoint: return "Disjoint"; + case ImageCreateFlagBits::eCornerSampledNV: return "CornerSampledNV"; + case ImageCreateFlagBits::eSampleLocationsCompatibleDepthEXT: return "SampleLocationsCompatibleDepthEXT"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(ImageCreateFlags value) + { + if (!value) return "{}"; + std::string result; + if (value & ImageCreateFlagBits::eSparseBinding) result += "SparseBinding | "; + if (value & ImageCreateFlagBits::eSparseResidency) result += "SparseResidency | "; + if (value & ImageCreateFlagBits::eSparseAliased) result += "SparseAliased | "; + if (value & ImageCreateFlagBits::eMutableFormat) result += "MutableFormat | "; + if (value & ImageCreateFlagBits::eCubeCompatible) result += "CubeCompatible | "; + if (value & ImageCreateFlagBits::eAlias) result += "Alias | "; + if (value & ImageCreateFlagBits::eSplitInstanceBindRegions) result += "SplitInstanceBindRegions | "; + if (value & ImageCreateFlagBits::e2DArrayCompatible) result += "2DArrayCompatible | "; + if (value & ImageCreateFlagBits::eBlockTexelViewCompatible) result += "BlockTexelViewCompatible | "; + if (value & ImageCreateFlagBits::eExtendedUsage) result += "ExtendedUsage | "; + if (value & ImageCreateFlagBits::eProtected) result += "Protected | "; + if (value & ImageCreateFlagBits::eDisjoint) result += "Disjoint | "; + if (value & ImageCreateFlagBits::eCornerSampledNV) result += "CornerSampledNV | "; + if (value & ImageCreateFlagBits::eSampleLocationsCompatibleDepthEXT) result += "SampleLocationsCompatibleDepthEXT | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(PipelineCreateFlagBits value) + { + switch (value) + { + case PipelineCreateFlagBits::eDisableOptimization: return "DisableOptimization"; + case PipelineCreateFlagBits::eAllowDerivatives: return "AllowDerivatives"; + case PipelineCreateFlagBits::eDerivative: return "Derivative"; + case PipelineCreateFlagBits::eViewIndexFromDeviceIndex: return "ViewIndexFromDeviceIndex"; + case PipelineCreateFlagBits::eDispatchBase: return "DispatchBase"; + case PipelineCreateFlagBits::eDeferCompileNV: return "DeferCompileNV"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(PipelineCreateFlags value) + { + if (!value) return "{}"; + std::string result; + if (value & PipelineCreateFlagBits::eDisableOptimization) 
result += "DisableOptimization | "; + if (value & PipelineCreateFlagBits::eAllowDerivatives) result += "AllowDerivatives | "; + if (value & PipelineCreateFlagBits::eDerivative) result += "Derivative | "; + if (value & PipelineCreateFlagBits::eViewIndexFromDeviceIndex) result += "ViewIndexFromDeviceIndex | "; + if (value & PipelineCreateFlagBits::eDispatchBase) result += "DispatchBase | "; + if (value & PipelineCreateFlagBits::eDeferCompileNV) result += "DeferCompileNV | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(ColorComponentFlagBits value) + { + switch (value) + { + case ColorComponentFlagBits::eR: return "R"; + case ColorComponentFlagBits::eG: return "G"; + case ColorComponentFlagBits::eB: return "B"; + case ColorComponentFlagBits::eA: return "A"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(ColorComponentFlags value) + { + if (!value) return "{}"; + std::string result; + if (value & ColorComponentFlagBits::eR) result += "R | "; + if (value & ColorComponentFlagBits::eG) result += "G | "; + if (value & ColorComponentFlagBits::eB) result += "B | "; + if (value & ColorComponentFlagBits::eA) result += "A | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(FenceCreateFlagBits value) + { + switch (value) + { + case FenceCreateFlagBits::eSignaled: return "Signaled"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(FenceCreateFlags value) + { + if (!value) return "{}"; + std::string result; + if (value & FenceCreateFlagBits::eSignaled) result += "Signaled | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(FormatFeatureFlagBits value) + { + switch (value) + { + case FormatFeatureFlagBits::eSampledImage: return "SampledImage"; + case FormatFeatureFlagBits::eStorageImage: return "StorageImage"; + case FormatFeatureFlagBits::eStorageImageAtomic: return "StorageImageAtomic"; + case FormatFeatureFlagBits::eUniformTexelBuffer: return "UniformTexelBuffer"; + case FormatFeatureFlagBits::eStorageTexelBuffer: return "StorageTexelBuffer"; + case FormatFeatureFlagBits::eStorageTexelBufferAtomic: return "StorageTexelBufferAtomic"; + case FormatFeatureFlagBits::eVertexBuffer: return "VertexBuffer"; + case FormatFeatureFlagBits::eColorAttachment: return "ColorAttachment"; + case FormatFeatureFlagBits::eColorAttachmentBlend: return "ColorAttachmentBlend"; + case FormatFeatureFlagBits::eDepthStencilAttachment: return "DepthStencilAttachment"; + case FormatFeatureFlagBits::eBlitSrc: return "BlitSrc"; + case FormatFeatureFlagBits::eBlitDst: return "BlitDst"; + case FormatFeatureFlagBits::eSampledImageFilterLinear: return "SampledImageFilterLinear"; + case FormatFeatureFlagBits::eTransferSrc: return "TransferSrc"; + case FormatFeatureFlagBits::eTransferDst: return "TransferDst"; + case FormatFeatureFlagBits::eMidpointChromaSamples: return "MidpointChromaSamples"; + case FormatFeatureFlagBits::eSampledImageYcbcrConversionLinearFilter: return "SampledImageYcbcrConversionLinearFilter"; + case FormatFeatureFlagBits::eSampledImageYcbcrConversionSeparateReconstructionFilter: return "SampledImageYcbcrConversionSeparateReconstructionFilter"; + case FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicit: return "SampledImageYcbcrConversionChromaReconstructionExplicit"; + case 
FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable: return "SampledImageYcbcrConversionChromaReconstructionExplicitForceable"; + case FormatFeatureFlagBits::eDisjoint: return "Disjoint"; + case FormatFeatureFlagBits::eCositedChromaSamples: return "CositedChromaSamples"; + case FormatFeatureFlagBits::eSampledImageFilterCubicIMG: return "SampledImageFilterCubicIMG"; + case FormatFeatureFlagBits::eSampledImageFilterMinmaxEXT: return "SampledImageFilterMinmaxEXT"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(FormatFeatureFlags value) + { + if (!value) return "{}"; + std::string result; + if (value & FormatFeatureFlagBits::eSampledImage) result += "SampledImage | "; + if (value & FormatFeatureFlagBits::eStorageImage) result += "StorageImage | "; + if (value & FormatFeatureFlagBits::eStorageImageAtomic) result += "StorageImageAtomic | "; + if (value & FormatFeatureFlagBits::eUniformTexelBuffer) result += "UniformTexelBuffer | "; + if (value & FormatFeatureFlagBits::eStorageTexelBuffer) result += "StorageTexelBuffer | "; + if (value & FormatFeatureFlagBits::eStorageTexelBufferAtomic) result += "StorageTexelBufferAtomic | "; + if (value & FormatFeatureFlagBits::eVertexBuffer) result += "VertexBuffer | "; + if (value & FormatFeatureFlagBits::eColorAttachment) result += "ColorAttachment | "; + if (value & FormatFeatureFlagBits::eColorAttachmentBlend) result += "ColorAttachmentBlend | "; + if (value & FormatFeatureFlagBits::eDepthStencilAttachment) result += "DepthStencilAttachment | "; + if (value & FormatFeatureFlagBits::eBlitSrc) result += "BlitSrc | "; + if (value & FormatFeatureFlagBits::eBlitDst) result += "BlitDst | "; + if (value & FormatFeatureFlagBits::eSampledImageFilterLinear) result += "SampledImageFilterLinear | "; + if (value & FormatFeatureFlagBits::eTransferSrc) result += "TransferSrc | "; + if (value & FormatFeatureFlagBits::eTransferDst) result += "TransferDst | "; + if (value & FormatFeatureFlagBits::eMidpointChromaSamples) result += "MidpointChromaSamples | "; + if (value & FormatFeatureFlagBits::eSampledImageYcbcrConversionLinearFilter) result += "SampledImageYcbcrConversionLinearFilter | "; + if (value & FormatFeatureFlagBits::eSampledImageYcbcrConversionSeparateReconstructionFilter) result += "SampledImageYcbcrConversionSeparateReconstructionFilter | "; + if (value & FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicit) result += "SampledImageYcbcrConversionChromaReconstructionExplicit | "; + if (value & FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable) result += "SampledImageYcbcrConversionChromaReconstructionExplicitForceable | "; + if (value & FormatFeatureFlagBits::eDisjoint) result += "Disjoint | "; + if (value & FormatFeatureFlagBits::eCositedChromaSamples) result += "CositedChromaSamples | "; + if (value & FormatFeatureFlagBits::eSampledImageFilterCubicIMG) result += "SampledImageFilterCubicIMG | "; + if (value & FormatFeatureFlagBits::eSampledImageFilterMinmaxEXT) result += "SampledImageFilterMinmaxEXT | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(QueryControlFlagBits value) + { + switch (value) + { + case QueryControlFlagBits::ePrecise: return "Precise"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(QueryControlFlags value) + { + if (!value) return "{}"; + std::string result; + if (value & QueryControlFlagBits::ePrecise) result += 
"Precise | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(QueryResultFlagBits value) + { + switch (value) + { + case QueryResultFlagBits::e64: return "64"; + case QueryResultFlagBits::eWait: return "Wait"; + case QueryResultFlagBits::eWithAvailability: return "WithAvailability"; + case QueryResultFlagBits::ePartial: return "Partial"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(QueryResultFlags value) + { + if (!value) return "{}"; + std::string result; + if (value & QueryResultFlagBits::e64) result += "64 | "; + if (value & QueryResultFlagBits::eWait) result += "Wait | "; + if (value & QueryResultFlagBits::eWithAvailability) result += "WithAvailability | "; + if (value & QueryResultFlagBits::ePartial) result += "Partial | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(CommandBufferUsageFlagBits value) + { + switch (value) + { + case CommandBufferUsageFlagBits::eOneTimeSubmit: return "OneTimeSubmit"; + case CommandBufferUsageFlagBits::eRenderPassContinue: return "RenderPassContinue"; + case CommandBufferUsageFlagBits::eSimultaneousUse: return "SimultaneousUse"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(CommandBufferUsageFlags value) + { + if (!value) return "{}"; + std::string result; + if (value & CommandBufferUsageFlagBits::eOneTimeSubmit) result += "OneTimeSubmit | "; + if (value & CommandBufferUsageFlagBits::eRenderPassContinue) result += "RenderPassContinue | "; + if (value & CommandBufferUsageFlagBits::eSimultaneousUse) result += "SimultaneousUse | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(QueryPipelineStatisticFlagBits value) + { + switch (value) + { + case QueryPipelineStatisticFlagBits::eInputAssemblyVertices: return "InputAssemblyVertices"; + case QueryPipelineStatisticFlagBits::eInputAssemblyPrimitives: return "InputAssemblyPrimitives"; + case QueryPipelineStatisticFlagBits::eVertexShaderInvocations: return "VertexShaderInvocations"; + case QueryPipelineStatisticFlagBits::eGeometryShaderInvocations: return "GeometryShaderInvocations"; + case QueryPipelineStatisticFlagBits::eGeometryShaderPrimitives: return "GeometryShaderPrimitives"; + case QueryPipelineStatisticFlagBits::eClippingInvocations: return "ClippingInvocations"; + case QueryPipelineStatisticFlagBits::eClippingPrimitives: return "ClippingPrimitives"; + case QueryPipelineStatisticFlagBits::eFragmentShaderInvocations: return "FragmentShaderInvocations"; + case QueryPipelineStatisticFlagBits::eTessellationControlShaderPatches: return "TessellationControlShaderPatches"; + case QueryPipelineStatisticFlagBits::eTessellationEvaluationShaderInvocations: return "TessellationEvaluationShaderInvocations"; + case QueryPipelineStatisticFlagBits::eComputeShaderInvocations: return "ComputeShaderInvocations"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(QueryPipelineStatisticFlags value) + { + if (!value) return "{}"; + std::string result; + if (value & QueryPipelineStatisticFlagBits::eInputAssemblyVertices) result += "InputAssemblyVertices | "; + if (value & QueryPipelineStatisticFlagBits::eInputAssemblyPrimitives) result += "InputAssemblyPrimitives | "; + if (value & QueryPipelineStatisticFlagBits::eVertexShaderInvocations) result += "VertexShaderInvocations | "; + if (value & QueryPipelineStatisticFlagBits::eGeometryShaderInvocations) 
result += "GeometryShaderInvocations | "; + if (value & QueryPipelineStatisticFlagBits::eGeometryShaderPrimitives) result += "GeometryShaderPrimitives | "; + if (value & QueryPipelineStatisticFlagBits::eClippingInvocations) result += "ClippingInvocations | "; + if (value & QueryPipelineStatisticFlagBits::eClippingPrimitives) result += "ClippingPrimitives | "; + if (value & QueryPipelineStatisticFlagBits::eFragmentShaderInvocations) result += "FragmentShaderInvocations | "; + if (value & QueryPipelineStatisticFlagBits::eTessellationControlShaderPatches) result += "TessellationControlShaderPatches | "; + if (value & QueryPipelineStatisticFlagBits::eTessellationEvaluationShaderInvocations) result += "TessellationEvaluationShaderInvocations | "; + if (value & QueryPipelineStatisticFlagBits::eComputeShaderInvocations) result += "ComputeShaderInvocations | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(ImageAspectFlagBits value) + { + switch (value) + { + case ImageAspectFlagBits::eColor: return "Color"; + case ImageAspectFlagBits::eDepth: return "Depth"; + case ImageAspectFlagBits::eStencil: return "Stencil"; + case ImageAspectFlagBits::eMetadata: return "Metadata"; + case ImageAspectFlagBits::ePlane0: return "Plane0"; + case ImageAspectFlagBits::ePlane1: return "Plane1"; + case ImageAspectFlagBits::ePlane2: return "Plane2"; + case ImageAspectFlagBits::eMemoryPlane0EXT: return "MemoryPlane0EXT"; + case ImageAspectFlagBits::eMemoryPlane1EXT: return "MemoryPlane1EXT"; + case ImageAspectFlagBits::eMemoryPlane2EXT: return "MemoryPlane2EXT"; + case ImageAspectFlagBits::eMemoryPlane3EXT: return "MemoryPlane3EXT"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(ImageAspectFlags value) + { + if (!value) return "{}"; + std::string result; + if (value & ImageAspectFlagBits::eColor) result += "Color | "; + if (value & ImageAspectFlagBits::eDepth) result += "Depth | "; + if (value & ImageAspectFlagBits::eStencil) result += "Stencil | "; + if (value & ImageAspectFlagBits::eMetadata) result += "Metadata | "; + if (value & ImageAspectFlagBits::ePlane0) result += "Plane0 | "; + if (value & ImageAspectFlagBits::ePlane1) result += "Plane1 | "; + if (value & ImageAspectFlagBits::ePlane2) result += "Plane2 | "; + if (value & ImageAspectFlagBits::eMemoryPlane0EXT) result += "MemoryPlane0EXT | "; + if (value & ImageAspectFlagBits::eMemoryPlane1EXT) result += "MemoryPlane1EXT | "; + if (value & ImageAspectFlagBits::eMemoryPlane2EXT) result += "MemoryPlane2EXT | "; + if (value & ImageAspectFlagBits::eMemoryPlane3EXT) result += "MemoryPlane3EXT | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(SparseImageFormatFlagBits value) + { + switch (value) + { + case SparseImageFormatFlagBits::eSingleMiptail: return "SingleMiptail"; + case SparseImageFormatFlagBits::eAlignedMipSize: return "AlignedMipSize"; + case SparseImageFormatFlagBits::eNonstandardBlockSize: return "NonstandardBlockSize"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(SparseImageFormatFlags value) + { + if (!value) return "{}"; + std::string result; + if (value & SparseImageFormatFlagBits::eSingleMiptail) result += "SingleMiptail | "; + if (value & SparseImageFormatFlagBits::eAlignedMipSize) result += "AlignedMipSize | "; + if (value & SparseImageFormatFlagBits::eNonstandardBlockSize) result += "NonstandardBlockSize | "; + return "{" + result.substr(0, result.size() - 3) 
+ "}"; + } + + VULKAN_HPP_INLINE std::string to_string(SparseMemoryBindFlagBits value) + { + switch (value) + { + case SparseMemoryBindFlagBits::eMetadata: return "Metadata"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(SparseMemoryBindFlags value) + { + if (!value) return "{}"; + std::string result; + if (value & SparseMemoryBindFlagBits::eMetadata) result += "Metadata | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(PipelineStageFlagBits value) + { + switch (value) + { + case PipelineStageFlagBits::eTopOfPipe: return "TopOfPipe"; + case PipelineStageFlagBits::eDrawIndirect: return "DrawIndirect"; + case PipelineStageFlagBits::eVertexInput: return "VertexInput"; + case PipelineStageFlagBits::eVertexShader: return "VertexShader"; + case PipelineStageFlagBits::eTessellationControlShader: return "TessellationControlShader"; + case PipelineStageFlagBits::eTessellationEvaluationShader: return "TessellationEvaluationShader"; + case PipelineStageFlagBits::eGeometryShader: return "GeometryShader"; + case PipelineStageFlagBits::eFragmentShader: return "FragmentShader"; + case PipelineStageFlagBits::eEarlyFragmentTests: return "EarlyFragmentTests"; + case PipelineStageFlagBits::eLateFragmentTests: return "LateFragmentTests"; + case PipelineStageFlagBits::eColorAttachmentOutput: return "ColorAttachmentOutput"; + case PipelineStageFlagBits::eComputeShader: return "ComputeShader"; + case PipelineStageFlagBits::eTransfer: return "Transfer"; + case PipelineStageFlagBits::eBottomOfPipe: return "BottomOfPipe"; + case PipelineStageFlagBits::eHost: return "Host"; + case PipelineStageFlagBits::eAllGraphics: return "AllGraphics"; + case PipelineStageFlagBits::eAllCommands: return "AllCommands"; + case PipelineStageFlagBits::eTransformFeedbackEXT: return "TransformFeedbackEXT"; + case PipelineStageFlagBits::eConditionalRenderingEXT: return "ConditionalRenderingEXT"; + case PipelineStageFlagBits::eCommandProcessNVX: return "CommandProcessNVX"; + case PipelineStageFlagBits::eShadingRateImageNV: return "ShadingRateImageNV"; + case PipelineStageFlagBits::eRayTracingShaderNV: return "RayTracingShaderNV"; + case PipelineStageFlagBits::eAccelerationStructureBuildNV: return "AccelerationStructureBuildNV"; + case PipelineStageFlagBits::eTaskShaderNV: return "TaskShaderNV"; + case PipelineStageFlagBits::eMeshShaderNV: return "MeshShaderNV"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(PipelineStageFlags value) + { + if (!value) return "{}"; + std::string result; + if (value & PipelineStageFlagBits::eTopOfPipe) result += "TopOfPipe | "; + if (value & PipelineStageFlagBits::eDrawIndirect) result += "DrawIndirect | "; + if (value & PipelineStageFlagBits::eVertexInput) result += "VertexInput | "; + if (value & PipelineStageFlagBits::eVertexShader) result += "VertexShader | "; + if (value & PipelineStageFlagBits::eTessellationControlShader) result += "TessellationControlShader | "; + if (value & PipelineStageFlagBits::eTessellationEvaluationShader) result += "TessellationEvaluationShader | "; + if (value & PipelineStageFlagBits::eGeometryShader) result += "GeometryShader | "; + if (value & PipelineStageFlagBits::eFragmentShader) result += "FragmentShader | "; + if (value & PipelineStageFlagBits::eEarlyFragmentTests) result += "EarlyFragmentTests | "; + if (value & PipelineStageFlagBits::eLateFragmentTests) result += "LateFragmentTests | "; + if (value & 
PipelineStageFlagBits::eColorAttachmentOutput) result += "ColorAttachmentOutput | "; + if (value & PipelineStageFlagBits::eComputeShader) result += "ComputeShader | "; + if (value & PipelineStageFlagBits::eTransfer) result += "Transfer | "; + if (value & PipelineStageFlagBits::eBottomOfPipe) result += "BottomOfPipe | "; + if (value & PipelineStageFlagBits::eHost) result += "Host | "; + if (value & PipelineStageFlagBits::eAllGraphics) result += "AllGraphics | "; + if (value & PipelineStageFlagBits::eAllCommands) result += "AllCommands | "; + if (value & PipelineStageFlagBits::eTransformFeedbackEXT) result += "TransformFeedbackEXT | "; + if (value & PipelineStageFlagBits::eConditionalRenderingEXT) result += "ConditionalRenderingEXT | "; + if (value & PipelineStageFlagBits::eCommandProcessNVX) result += "CommandProcessNVX | "; + if (value & PipelineStageFlagBits::eShadingRateImageNV) result += "ShadingRateImageNV | "; + if (value & PipelineStageFlagBits::eRayTracingShaderNV) result += "RayTracingShaderNV | "; + if (value & PipelineStageFlagBits::eAccelerationStructureBuildNV) result += "AccelerationStructureBuildNV | "; + if (value & PipelineStageFlagBits::eTaskShaderNV) result += "TaskShaderNV | "; + if (value & PipelineStageFlagBits::eMeshShaderNV) result += "MeshShaderNV | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(CommandPoolCreateFlagBits value) + { + switch (value) + { + case CommandPoolCreateFlagBits::eTransient: return "Transient"; + case CommandPoolCreateFlagBits::eResetCommandBuffer: return "ResetCommandBuffer"; + case CommandPoolCreateFlagBits::eProtected: return "Protected"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(CommandPoolCreateFlags value) + { + if (!value) return "{}"; + std::string result; + if (value & CommandPoolCreateFlagBits::eTransient) result += "Transient | "; + if (value & CommandPoolCreateFlagBits::eResetCommandBuffer) result += "ResetCommandBuffer | "; + if (value & CommandPoolCreateFlagBits::eProtected) result += "Protected | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(CommandPoolResetFlagBits value) + { + switch (value) + { + case CommandPoolResetFlagBits::eReleaseResources: return "ReleaseResources"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(CommandPoolResetFlags value) + { + if (!value) return "{}"; + std::string result; + if (value & CommandPoolResetFlagBits::eReleaseResources) result += "ReleaseResources | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(CommandBufferResetFlagBits value) + { + switch (value) + { + case CommandBufferResetFlagBits::eReleaseResources: return "ReleaseResources"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(CommandBufferResetFlags value) + { + if (!value) return "{}"; + std::string result; + if (value & CommandBufferResetFlagBits::eReleaseResources) result += "ReleaseResources | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(SampleCountFlagBits value) + { + switch (value) + { + case SampleCountFlagBits::e1: return "1"; + case SampleCountFlagBits::e2: return "2"; + case SampleCountFlagBits::e4: return "4"; + case SampleCountFlagBits::e8: return "8"; + case SampleCountFlagBits::e16: return "16"; + case SampleCountFlagBits::e32: return "32"; + case 
SampleCountFlagBits::e64: return "64"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(SampleCountFlags value) + { + if (!value) return "{}"; + std::string result; + if (value & SampleCountFlagBits::e1) result += "1 | "; + if (value & SampleCountFlagBits::e2) result += "2 | "; + if (value & SampleCountFlagBits::e4) result += "4 | "; + if (value & SampleCountFlagBits::e8) result += "8 | "; + if (value & SampleCountFlagBits::e16) result += "16 | "; + if (value & SampleCountFlagBits::e32) result += "32 | "; + if (value & SampleCountFlagBits::e64) result += "64 | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(AttachmentDescriptionFlagBits value) + { + switch (value) + { + case AttachmentDescriptionFlagBits::eMayAlias: return "MayAlias"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(AttachmentDescriptionFlags value) + { + if (!value) return "{}"; + std::string result; + if (value & AttachmentDescriptionFlagBits::eMayAlias) result += "MayAlias | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(StencilFaceFlagBits value) + { + switch (value) + { + case StencilFaceFlagBits::eFront: return "Front"; + case StencilFaceFlagBits::eBack: return "Back"; + case StencilFaceFlagBits::eVkStencilFrontAndBack: return "VkStencilFrontAndBack"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(StencilFaceFlags value) + { + if (!value) return "{}"; + std::string result; + if (value & StencilFaceFlagBits::eFront) result += "Front | "; + if (value & StencilFaceFlagBits::eBack) result += "Back | "; + if (value & StencilFaceFlagBits::eVkStencilFrontAndBack) result += "VkStencilFrontAndBack | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(DescriptorPoolCreateFlagBits value) + { + switch (value) + { + case DescriptorPoolCreateFlagBits::eFreeDescriptorSet: return "FreeDescriptorSet"; + case DescriptorPoolCreateFlagBits::eUpdateAfterBindEXT: return "UpdateAfterBindEXT"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(DescriptorPoolCreateFlags value) + { + if (!value) return "{}"; + std::string result; + if (value & DescriptorPoolCreateFlagBits::eFreeDescriptorSet) result += "FreeDescriptorSet | "; + if (value & DescriptorPoolCreateFlagBits::eUpdateAfterBindEXT) result += "UpdateAfterBindEXT | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(DependencyFlagBits value) + { + switch (value) + { + case DependencyFlagBits::eByRegion: return "ByRegion"; + case DependencyFlagBits::eDeviceGroup: return "DeviceGroup"; + case DependencyFlagBits::eViewLocal: return "ViewLocal"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(DependencyFlags value) + { + if (!value) return "{}"; + std::string result; + if (value & DependencyFlagBits::eByRegion) result += "ByRegion | "; + if (value & DependencyFlagBits::eDeviceGroup) result += "DeviceGroup | "; + if (value & DependencyFlagBits::eViewLocal) result += "ViewLocal | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(PresentModeKHR value) + { + switch (value) + { + case PresentModeKHR::eImmediate: return "Immediate"; + case PresentModeKHR::eMailbox: return "Mailbox"; + case PresentModeKHR::eFifo: return "Fifo"; + case 
PresentModeKHR::eFifoRelaxed: return "FifoRelaxed"; + case PresentModeKHR::eSharedDemandRefresh: return "SharedDemandRefresh"; + case PresentModeKHR::eSharedContinuousRefresh: return "SharedContinuousRefresh"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(ColorSpaceKHR value) + { + switch (value) + { + case ColorSpaceKHR::eSrgbNonlinear: return "SrgbNonlinear"; + case ColorSpaceKHR::eDisplayP3NonlinearEXT: return "DisplayP3NonlinearEXT"; + case ColorSpaceKHR::eExtendedSrgbLinearEXT: return "ExtendedSrgbLinearEXT"; + case ColorSpaceKHR::eDciP3LinearEXT: return "DciP3LinearEXT"; + case ColorSpaceKHR::eDciP3NonlinearEXT: return "DciP3NonlinearEXT"; + case ColorSpaceKHR::eBt709LinearEXT: return "Bt709LinearEXT"; + case ColorSpaceKHR::eBt709NonlinearEXT: return "Bt709NonlinearEXT"; + case ColorSpaceKHR::eBt2020LinearEXT: return "Bt2020LinearEXT"; + case ColorSpaceKHR::eHdr10St2084EXT: return "Hdr10St2084EXT"; + case ColorSpaceKHR::eDolbyvisionEXT: return "DolbyvisionEXT"; + case ColorSpaceKHR::eHdr10HlgEXT: return "Hdr10HlgEXT"; + case ColorSpaceKHR::eAdobergbLinearEXT: return "AdobergbLinearEXT"; + case ColorSpaceKHR::eAdobergbNonlinearEXT: return "AdobergbNonlinearEXT"; + case ColorSpaceKHR::ePassThroughEXT: return "PassThroughEXT"; + case ColorSpaceKHR::eExtendedSrgbNonlinearEXT: return "ExtendedSrgbNonlinearEXT"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(DisplayPlaneAlphaFlagBitsKHR value) + { + switch (value) + { + case DisplayPlaneAlphaFlagBitsKHR::eOpaque: return "Opaque"; + case DisplayPlaneAlphaFlagBitsKHR::eGlobal: return "Global"; + case DisplayPlaneAlphaFlagBitsKHR::ePerPixel: return "PerPixel"; + case DisplayPlaneAlphaFlagBitsKHR::ePerPixelPremultiplied: return "PerPixelPremultiplied"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(DisplayPlaneAlphaFlagsKHR value) + { + if (!value) return "{}"; + std::string result; + if (value & DisplayPlaneAlphaFlagBitsKHR::eOpaque) result += "Opaque | "; + if (value & DisplayPlaneAlphaFlagBitsKHR::eGlobal) result += "Global | "; + if (value & DisplayPlaneAlphaFlagBitsKHR::ePerPixel) result += "PerPixel | "; + if (value & DisplayPlaneAlphaFlagBitsKHR::ePerPixelPremultiplied) result += "PerPixelPremultiplied | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(CompositeAlphaFlagBitsKHR value) + { + switch (value) + { + case CompositeAlphaFlagBitsKHR::eOpaque: return "Opaque"; + case CompositeAlphaFlagBitsKHR::ePreMultiplied: return "PreMultiplied"; + case CompositeAlphaFlagBitsKHR::ePostMultiplied: return "PostMultiplied"; + case CompositeAlphaFlagBitsKHR::eInherit: return "Inherit"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(CompositeAlphaFlagsKHR value) + { + if (!value) return "{}"; + std::string result; + if (value & CompositeAlphaFlagBitsKHR::eOpaque) result += "Opaque | "; + if (value & CompositeAlphaFlagBitsKHR::ePreMultiplied) result += "PreMultiplied | "; + if (value & CompositeAlphaFlagBitsKHR::ePostMultiplied) result += "PostMultiplied | "; + if (value & CompositeAlphaFlagBitsKHR::eInherit) result += "Inherit | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(SurfaceTransformFlagBitsKHR value) + { + switch (value) + { + case SurfaceTransformFlagBitsKHR::eIdentity: return "Identity"; + case SurfaceTransformFlagBitsKHR::eRotate90: return "Rotate90"; + case 
SurfaceTransformFlagBitsKHR::eRotate180: return "Rotate180"; + case SurfaceTransformFlagBitsKHR::eRotate270: return "Rotate270"; + case SurfaceTransformFlagBitsKHR::eHorizontalMirror: return "HorizontalMirror"; + case SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate90: return "HorizontalMirrorRotate90"; + case SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate180: return "HorizontalMirrorRotate180"; + case SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate270: return "HorizontalMirrorRotate270"; + case SurfaceTransformFlagBitsKHR::eInherit: return "Inherit"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(SurfaceTransformFlagsKHR value) + { + if (!value) return "{}"; + std::string result; + if (value & SurfaceTransformFlagBitsKHR::eIdentity) result += "Identity | "; + if (value & SurfaceTransformFlagBitsKHR::eRotate90) result += "Rotate90 | "; + if (value & SurfaceTransformFlagBitsKHR::eRotate180) result += "Rotate180 | "; + if (value & SurfaceTransformFlagBitsKHR::eRotate270) result += "Rotate270 | "; + if (value & SurfaceTransformFlagBitsKHR::eHorizontalMirror) result += "HorizontalMirror | "; + if (value & SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate90) result += "HorizontalMirrorRotate90 | "; + if (value & SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate180) result += "HorizontalMirrorRotate180 | "; + if (value & SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate270) result += "HorizontalMirrorRotate270 | "; + if (value & SurfaceTransformFlagBitsKHR::eInherit) result += "Inherit | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(TimeDomainEXT value) + { + switch (value) + { + case TimeDomainEXT::eDevice: return "Device"; + case TimeDomainEXT::eClockMonotonic: return "ClockMonotonic"; + case TimeDomainEXT::eClockMonotonicRaw: return "ClockMonotonicRaw"; + case TimeDomainEXT::eQueryPerformanceCounter: return "QueryPerformanceCounter"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(DebugReportFlagBitsEXT value) + { + switch (value) + { + case DebugReportFlagBitsEXT::eInformation: return "Information"; + case DebugReportFlagBitsEXT::eWarning: return "Warning"; + case DebugReportFlagBitsEXT::ePerformanceWarning: return "PerformanceWarning"; + case DebugReportFlagBitsEXT::eError: return "Error"; + case DebugReportFlagBitsEXT::eDebug: return "Debug"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(DebugReportFlagsEXT value) + { + if (!value) return "{}"; + std::string result; + if (value & DebugReportFlagBitsEXT::eInformation) result += "Information | "; + if (value & DebugReportFlagBitsEXT::eWarning) result += "Warning | "; + if (value & DebugReportFlagBitsEXT::ePerformanceWarning) result += "PerformanceWarning | "; + if (value & DebugReportFlagBitsEXT::eError) result += "Error | "; + if (value & DebugReportFlagBitsEXT::eDebug) result += "Debug | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(DebugReportObjectTypeEXT value) + { + switch (value) + { + case DebugReportObjectTypeEXT::eUnknown: return "Unknown"; + case DebugReportObjectTypeEXT::eInstance: return "Instance"; + case DebugReportObjectTypeEXT::ePhysicalDevice: return "PhysicalDevice"; + case DebugReportObjectTypeEXT::eDevice: return "Device"; + case DebugReportObjectTypeEXT::eQueue: return "Queue"; + case DebugReportObjectTypeEXT::eSemaphore: return "Semaphore"; + case 
DebugReportObjectTypeEXT::eCommandBuffer: return "CommandBuffer"; + case DebugReportObjectTypeEXT::eFence: return "Fence"; + case DebugReportObjectTypeEXT::eDeviceMemory: return "DeviceMemory"; + case DebugReportObjectTypeEXT::eBuffer: return "Buffer"; + case DebugReportObjectTypeEXT::eImage: return "Image"; + case DebugReportObjectTypeEXT::eEvent: return "Event"; + case DebugReportObjectTypeEXT::eQueryPool: return "QueryPool"; + case DebugReportObjectTypeEXT::eBufferView: return "BufferView"; + case DebugReportObjectTypeEXT::eImageView: return "ImageView"; + case DebugReportObjectTypeEXT::eShaderModule: return "ShaderModule"; + case DebugReportObjectTypeEXT::ePipelineCache: return "PipelineCache"; + case DebugReportObjectTypeEXT::ePipelineLayout: return "PipelineLayout"; + case DebugReportObjectTypeEXT::eRenderPass: return "RenderPass"; + case DebugReportObjectTypeEXT::ePipeline: return "Pipeline"; + case DebugReportObjectTypeEXT::eDescriptorSetLayout: return "DescriptorSetLayout"; + case DebugReportObjectTypeEXT::eSampler: return "Sampler"; + case DebugReportObjectTypeEXT::eDescriptorPool: return "DescriptorPool"; + case DebugReportObjectTypeEXT::eDescriptorSet: return "DescriptorSet"; + case DebugReportObjectTypeEXT::eFramebuffer: return "Framebuffer"; + case DebugReportObjectTypeEXT::eCommandPool: return "CommandPool"; + case DebugReportObjectTypeEXT::eSurfaceKhr: return "SurfaceKhr"; + case DebugReportObjectTypeEXT::eSwapchainKhr: return "SwapchainKhr"; + case DebugReportObjectTypeEXT::eDebugReportCallbackExt: return "DebugReportCallbackExt"; + case DebugReportObjectTypeEXT::eDisplayKhr: return "DisplayKhr"; + case DebugReportObjectTypeEXT::eDisplayModeKhr: return "DisplayModeKhr"; + case DebugReportObjectTypeEXT::eObjectTableNvx: return "ObjectTableNvx"; + case DebugReportObjectTypeEXT::eIndirectCommandsLayoutNvx: return "IndirectCommandsLayoutNvx"; + case DebugReportObjectTypeEXT::eValidationCacheExt: return "ValidationCacheExt"; + case DebugReportObjectTypeEXT::eSamplerYcbcrConversion: return "SamplerYcbcrConversion"; + case DebugReportObjectTypeEXT::eDescriptorUpdateTemplate: return "DescriptorUpdateTemplate"; + case DebugReportObjectTypeEXT::eAccelerationStructureNV: return "AccelerationStructureNV"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(RasterizationOrderAMD value) + { + switch (value) + { + case RasterizationOrderAMD::eStrict: return "Strict"; + case RasterizationOrderAMD::eRelaxed: return "Relaxed"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(ExternalMemoryHandleTypeFlagBitsNV value) + { + switch (value) + { + case ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32: return "OpaqueWin32"; + case ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32Kmt: return "OpaqueWin32Kmt"; + case ExternalMemoryHandleTypeFlagBitsNV::eD3D11Image: return "D3D11Image"; + case ExternalMemoryHandleTypeFlagBitsNV::eD3D11ImageKmt: return "D3D11ImageKmt"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(ExternalMemoryHandleTypeFlagsNV value) + { + if (!value) return "{}"; + std::string result; + if (value & ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32) result += "OpaqueWin32 | "; + if (value & ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32Kmt) result += "OpaqueWin32Kmt | "; + if (value & ExternalMemoryHandleTypeFlagBitsNV::eD3D11Image) result += "D3D11Image | "; + if (value & ExternalMemoryHandleTypeFlagBitsNV::eD3D11ImageKmt) result += "D3D11ImageKmt | "; + return "{" + result.substr(0, 
result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(ExternalMemoryFeatureFlagBitsNV value) + { + switch (value) + { + case ExternalMemoryFeatureFlagBitsNV::eDedicatedOnly: return "DedicatedOnly"; + case ExternalMemoryFeatureFlagBitsNV::eExportable: return "Exportable"; + case ExternalMemoryFeatureFlagBitsNV::eImportable: return "Importable"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(ExternalMemoryFeatureFlagsNV value) + { + if (!value) return "{}"; + std::string result; + if (value & ExternalMemoryFeatureFlagBitsNV::eDedicatedOnly) result += "DedicatedOnly | "; + if (value & ExternalMemoryFeatureFlagBitsNV::eExportable) result += "Exportable | "; + if (value & ExternalMemoryFeatureFlagBitsNV::eImportable) result += "Importable | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(ValidationCheckEXT value) + { + switch (value) + { + case ValidationCheckEXT::eAll: return "All"; + case ValidationCheckEXT::eShaders: return "Shaders"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(SubgroupFeatureFlagBits value) + { + switch (value) + { + case SubgroupFeatureFlagBits::eBasic: return "Basic"; + case SubgroupFeatureFlagBits::eVote: return "Vote"; + case SubgroupFeatureFlagBits::eArithmetic: return "Arithmetic"; + case SubgroupFeatureFlagBits::eBallot: return "Ballot"; + case SubgroupFeatureFlagBits::eShuffle: return "Shuffle"; + case SubgroupFeatureFlagBits::eShuffleRelative: return "ShuffleRelative"; + case SubgroupFeatureFlagBits::eClustered: return "Clustered"; + case SubgroupFeatureFlagBits::eQuad: return "Quad"; + case SubgroupFeatureFlagBits::ePartitionedNV: return "PartitionedNV"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(SubgroupFeatureFlags value) + { + if (!value) return "{}"; + std::string result; + if (value & SubgroupFeatureFlagBits::eBasic) result += "Basic | "; + if (value & SubgroupFeatureFlagBits::eVote) result += "Vote | "; + if (value & SubgroupFeatureFlagBits::eArithmetic) result += "Arithmetic | "; + if (value & SubgroupFeatureFlagBits::eBallot) result += "Ballot | "; + if (value & SubgroupFeatureFlagBits::eShuffle) result += "Shuffle | "; + if (value & SubgroupFeatureFlagBits::eShuffleRelative) result += "ShuffleRelative | "; + if (value & SubgroupFeatureFlagBits::eClustered) result += "Clustered | "; + if (value & SubgroupFeatureFlagBits::eQuad) result += "Quad | "; + if (value & SubgroupFeatureFlagBits::ePartitionedNV) result += "PartitionedNV | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(IndirectCommandsLayoutUsageFlagBitsNVX value) + { + switch (value) + { + case IndirectCommandsLayoutUsageFlagBitsNVX::eUnorderedSequences: return "UnorderedSequences"; + case IndirectCommandsLayoutUsageFlagBitsNVX::eSparseSequences: return "SparseSequences"; + case IndirectCommandsLayoutUsageFlagBitsNVX::eEmptyExecutions: return "EmptyExecutions"; + case IndirectCommandsLayoutUsageFlagBitsNVX::eIndexedSequences: return "IndexedSequences"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(IndirectCommandsLayoutUsageFlagsNVX value) + { + if (!value) return "{}"; + std::string result; + if (value & IndirectCommandsLayoutUsageFlagBitsNVX::eUnorderedSequences) result += "UnorderedSequences | "; + if (value & IndirectCommandsLayoutUsageFlagBitsNVX::eSparseSequences) result += "SparseSequences | "; + if (value & 
IndirectCommandsLayoutUsageFlagBitsNVX::eEmptyExecutions) result += "EmptyExecutions | "; + if (value & IndirectCommandsLayoutUsageFlagBitsNVX::eIndexedSequences) result += "IndexedSequences | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(ObjectEntryUsageFlagBitsNVX value) + { + switch (value) + { + case ObjectEntryUsageFlagBitsNVX::eGraphics: return "Graphics"; + case ObjectEntryUsageFlagBitsNVX::eCompute: return "Compute"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(ObjectEntryUsageFlagsNVX value) + { + if (!value) return "{}"; + std::string result; + if (value & ObjectEntryUsageFlagBitsNVX::eGraphics) result += "Graphics | "; + if (value & ObjectEntryUsageFlagBitsNVX::eCompute) result += "Compute | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(IndirectCommandsTokenTypeNVX value) + { + switch (value) + { + case IndirectCommandsTokenTypeNVX::ePipeline: return "Pipeline"; + case IndirectCommandsTokenTypeNVX::eDescriptorSet: return "DescriptorSet"; + case IndirectCommandsTokenTypeNVX::eIndexBuffer: return "IndexBuffer"; + case IndirectCommandsTokenTypeNVX::eVertexBuffer: return "VertexBuffer"; + case IndirectCommandsTokenTypeNVX::ePushConstant: return "PushConstant"; + case IndirectCommandsTokenTypeNVX::eDrawIndexed: return "DrawIndexed"; + case IndirectCommandsTokenTypeNVX::eDraw: return "Draw"; + case IndirectCommandsTokenTypeNVX::eDispatch: return "Dispatch"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(ObjectEntryTypeNVX value) + { + switch (value) + { + case ObjectEntryTypeNVX::eDescriptorSet: return "DescriptorSet"; + case ObjectEntryTypeNVX::ePipeline: return "Pipeline"; + case ObjectEntryTypeNVX::eIndexBuffer: return "IndexBuffer"; + case ObjectEntryTypeNVX::eVertexBuffer: return "VertexBuffer"; + case ObjectEntryTypeNVX::ePushConstant: return "PushConstant"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(DescriptorSetLayoutCreateFlagBits value) + { + switch (value) + { + case DescriptorSetLayoutCreateFlagBits::ePushDescriptorKHR: return "PushDescriptorKHR"; + case DescriptorSetLayoutCreateFlagBits::eUpdateAfterBindPoolEXT: return "UpdateAfterBindPoolEXT"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(DescriptorSetLayoutCreateFlags value) + { + if (!value) return "{}"; + std::string result; + if (value & DescriptorSetLayoutCreateFlagBits::ePushDescriptorKHR) result += "PushDescriptorKHR | "; + if (value & DescriptorSetLayoutCreateFlagBits::eUpdateAfterBindPoolEXT) result += "UpdateAfterBindPoolEXT | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(ExternalMemoryHandleTypeFlagBits value) + { + switch (value) + { + case ExternalMemoryHandleTypeFlagBits::eOpaqueFd: return "OpaqueFd"; + case ExternalMemoryHandleTypeFlagBits::eOpaqueWin32: return "OpaqueWin32"; + case ExternalMemoryHandleTypeFlagBits::eOpaqueWin32Kmt: return "OpaqueWin32Kmt"; + case ExternalMemoryHandleTypeFlagBits::eD3D11Texture: return "D3D11Texture"; + case ExternalMemoryHandleTypeFlagBits::eD3D11TextureKmt: return "D3D11TextureKmt"; + case ExternalMemoryHandleTypeFlagBits::eD3D12Heap: return "D3D12Heap"; + case ExternalMemoryHandleTypeFlagBits::eD3D12Resource: return "D3D12Resource"; + case ExternalMemoryHandleTypeFlagBits::eDmaBufEXT: return "DmaBufEXT"; + case 
ExternalMemoryHandleTypeFlagBits::eAndroidHardwareBufferANDROID: return "AndroidHardwareBufferANDROID"; + case ExternalMemoryHandleTypeFlagBits::eHostAllocationEXT: return "HostAllocationEXT"; + case ExternalMemoryHandleTypeFlagBits::eHostMappedForeignMemoryEXT: return "HostMappedForeignMemoryEXT"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(ExternalMemoryHandleTypeFlags value) + { + if (!value) return "{}"; + std::string result; + if (value & ExternalMemoryHandleTypeFlagBits::eOpaqueFd) result += "OpaqueFd | "; + if (value & ExternalMemoryHandleTypeFlagBits::eOpaqueWin32) result += "OpaqueWin32 | "; + if (value & ExternalMemoryHandleTypeFlagBits::eOpaqueWin32Kmt) result += "OpaqueWin32Kmt | "; + if (value & ExternalMemoryHandleTypeFlagBits::eD3D11Texture) result += "D3D11Texture | "; + if (value & ExternalMemoryHandleTypeFlagBits::eD3D11TextureKmt) result += "D3D11TextureKmt | "; + if (value & ExternalMemoryHandleTypeFlagBits::eD3D12Heap) result += "D3D12Heap | "; + if (value & ExternalMemoryHandleTypeFlagBits::eD3D12Resource) result += "D3D12Resource | "; + if (value & ExternalMemoryHandleTypeFlagBits::eDmaBufEXT) result += "DmaBufEXT | "; + if (value & ExternalMemoryHandleTypeFlagBits::eAndroidHardwareBufferANDROID) result += "AndroidHardwareBufferANDROID | "; + if (value & ExternalMemoryHandleTypeFlagBits::eHostAllocationEXT) result += "HostAllocationEXT | "; + if (value & ExternalMemoryHandleTypeFlagBits::eHostMappedForeignMemoryEXT) result += "HostMappedForeignMemoryEXT | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(ExternalMemoryFeatureFlagBits value) + { + switch (value) + { + case ExternalMemoryFeatureFlagBits::eDedicatedOnly: return "DedicatedOnly"; + case ExternalMemoryFeatureFlagBits::eExportable: return "Exportable"; + case ExternalMemoryFeatureFlagBits::eImportable: return "Importable"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(ExternalMemoryFeatureFlags value) + { + if (!value) return "{}"; + std::string result; + if (value & ExternalMemoryFeatureFlagBits::eDedicatedOnly) result += "DedicatedOnly | "; + if (value & ExternalMemoryFeatureFlagBits::eExportable) result += "Exportable | "; + if (value & ExternalMemoryFeatureFlagBits::eImportable) result += "Importable | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(ExternalSemaphoreHandleTypeFlagBits value) + { + switch (value) + { + case ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd: return "OpaqueFd"; + case ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32: return "OpaqueWin32"; + case ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32Kmt: return "OpaqueWin32Kmt"; + case ExternalSemaphoreHandleTypeFlagBits::eD3D12Fence: return "D3D12Fence"; + case ExternalSemaphoreHandleTypeFlagBits::eSyncFd: return "SyncFd"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(ExternalSemaphoreHandleTypeFlags value) + { + if (!value) return "{}"; + std::string result; + if (value & ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd) result += "OpaqueFd | "; + if (value & ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32) result += "OpaqueWin32 | "; + if (value & ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32Kmt) result += "OpaqueWin32Kmt | "; + if (value & ExternalSemaphoreHandleTypeFlagBits::eD3D12Fence) result += "D3D12Fence | "; + if (value & ExternalSemaphoreHandleTypeFlagBits::eSyncFd) result += "SyncFd | "; + 
return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(ExternalSemaphoreFeatureFlagBits value) + { + switch (value) + { + case ExternalSemaphoreFeatureFlagBits::eExportable: return "Exportable"; + case ExternalSemaphoreFeatureFlagBits::eImportable: return "Importable"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(ExternalSemaphoreFeatureFlags value) + { + if (!value) return "{}"; + std::string result; + if (value & ExternalSemaphoreFeatureFlagBits::eExportable) result += "Exportable | "; + if (value & ExternalSemaphoreFeatureFlagBits::eImportable) result += "Importable | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(SemaphoreImportFlagBits value) + { + switch (value) + { + case SemaphoreImportFlagBits::eTemporary: return "Temporary"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(SemaphoreImportFlags value) + { + if (!value) return "{}"; + std::string result; + if (value & SemaphoreImportFlagBits::eTemporary) result += "Temporary | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(ExternalFenceHandleTypeFlagBits value) + { + switch (value) + { + case ExternalFenceHandleTypeFlagBits::eOpaqueFd: return "OpaqueFd"; + case ExternalFenceHandleTypeFlagBits::eOpaqueWin32: return "OpaqueWin32"; + case ExternalFenceHandleTypeFlagBits::eOpaqueWin32Kmt: return "OpaqueWin32Kmt"; + case ExternalFenceHandleTypeFlagBits::eSyncFd: return "SyncFd"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(ExternalFenceHandleTypeFlags value) + { + if (!value) return "{}"; + std::string result; + if (value & ExternalFenceHandleTypeFlagBits::eOpaqueFd) result += "OpaqueFd | "; + if (value & ExternalFenceHandleTypeFlagBits::eOpaqueWin32) result += "OpaqueWin32 | "; + if (value & ExternalFenceHandleTypeFlagBits::eOpaqueWin32Kmt) result += "OpaqueWin32Kmt | "; + if (value & ExternalFenceHandleTypeFlagBits::eSyncFd) result += "SyncFd | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(ExternalFenceFeatureFlagBits value) + { + switch (value) + { + case ExternalFenceFeatureFlagBits::eExportable: return "Exportable"; + case ExternalFenceFeatureFlagBits::eImportable: return "Importable"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(ExternalFenceFeatureFlags value) + { + if (!value) return "{}"; + std::string result; + if (value & ExternalFenceFeatureFlagBits::eExportable) result += "Exportable | "; + if (value & ExternalFenceFeatureFlagBits::eImportable) result += "Importable | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(FenceImportFlagBits value) + { + switch (value) + { + case FenceImportFlagBits::eTemporary: return "Temporary"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(FenceImportFlags value) + { + if (!value) return "{}"; + std::string result; + if (value & FenceImportFlagBits::eTemporary) result += "Temporary | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(SurfaceCounterFlagBitsEXT value) + { + switch (value) + { + case SurfaceCounterFlagBitsEXT::eVblank: return "Vblank"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(SurfaceCounterFlagsEXT value) + { + if 
(!value) return "{}"; + std::string result; + if (value & SurfaceCounterFlagBitsEXT::eVblank) result += "Vblank | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(DisplayPowerStateEXT value) + { + switch (value) + { + case DisplayPowerStateEXT::eOff: return "Off"; + case DisplayPowerStateEXT::eSuspend: return "Suspend"; + case DisplayPowerStateEXT::eOn: return "On"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(DeviceEventTypeEXT value) + { + switch (value) + { + case DeviceEventTypeEXT::eDisplayHotplug: return "DisplayHotplug"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(DisplayEventTypeEXT value) + { + switch (value) + { + case DisplayEventTypeEXT::eFirstPixelOut: return "FirstPixelOut"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(PeerMemoryFeatureFlagBits value) + { + switch (value) + { + case PeerMemoryFeatureFlagBits::eCopySrc: return "CopySrc"; + case PeerMemoryFeatureFlagBits::eCopyDst: return "CopyDst"; + case PeerMemoryFeatureFlagBits::eGenericSrc: return "GenericSrc"; + case PeerMemoryFeatureFlagBits::eGenericDst: return "GenericDst"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(PeerMemoryFeatureFlags value) + { + if (!value) return "{}"; + std::string result; + if (value & PeerMemoryFeatureFlagBits::eCopySrc) result += "CopySrc | "; + if (value & PeerMemoryFeatureFlagBits::eCopyDst) result += "CopyDst | "; + if (value & PeerMemoryFeatureFlagBits::eGenericSrc) result += "GenericSrc | "; + if (value & PeerMemoryFeatureFlagBits::eGenericDst) result += "GenericDst | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(MemoryAllocateFlagBits value) + { + switch (value) + { + case MemoryAllocateFlagBits::eDeviceMask: return "DeviceMask"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(MemoryAllocateFlags value) + { + if (!value) return "{}"; + std::string result; + if (value & MemoryAllocateFlagBits::eDeviceMask) result += "DeviceMask | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(DeviceGroupPresentModeFlagBitsKHR value) + { + switch (value) + { + case DeviceGroupPresentModeFlagBitsKHR::eLocal: return "Local"; + case DeviceGroupPresentModeFlagBitsKHR::eRemote: return "Remote"; + case DeviceGroupPresentModeFlagBitsKHR::eSum: return "Sum"; + case DeviceGroupPresentModeFlagBitsKHR::eLocalMultiDevice: return "LocalMultiDevice"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(DeviceGroupPresentModeFlagsKHR value) + { + if (!value) return "{}"; + std::string result; + if (value & DeviceGroupPresentModeFlagBitsKHR::eLocal) result += "Local | "; + if (value & DeviceGroupPresentModeFlagBitsKHR::eRemote) result += "Remote | "; + if (value & DeviceGroupPresentModeFlagBitsKHR::eSum) result += "Sum | "; + if (value & DeviceGroupPresentModeFlagBitsKHR::eLocalMultiDevice) result += "LocalMultiDevice | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(SwapchainCreateFlagBitsKHR value) + { + switch (value) + { + case SwapchainCreateFlagBitsKHR::eSplitInstanceBindRegions: return "SplitInstanceBindRegions"; + case SwapchainCreateFlagBitsKHR::eProtected: return "Protected"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string 
to_string(SwapchainCreateFlagsKHR value) + { + if (!value) return "{}"; + std::string result; + if (value & SwapchainCreateFlagBitsKHR::eSplitInstanceBindRegions) result += "SplitInstanceBindRegions | "; + if (value & SwapchainCreateFlagBitsKHR::eProtected) result += "Protected | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(ViewportCoordinateSwizzleNV value) + { + switch (value) + { + case ViewportCoordinateSwizzleNV::ePositiveX: return "PositiveX"; + case ViewportCoordinateSwizzleNV::eNegativeX: return "NegativeX"; + case ViewportCoordinateSwizzleNV::ePositiveY: return "PositiveY"; + case ViewportCoordinateSwizzleNV::eNegativeY: return "NegativeY"; + case ViewportCoordinateSwizzleNV::ePositiveZ: return "PositiveZ"; + case ViewportCoordinateSwizzleNV::eNegativeZ: return "NegativeZ"; + case ViewportCoordinateSwizzleNV::ePositiveW: return "PositiveW"; + case ViewportCoordinateSwizzleNV::eNegativeW: return "NegativeW"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(DiscardRectangleModeEXT value) + { + switch (value) + { + case DiscardRectangleModeEXT::eInclusive: return "Inclusive"; + case DiscardRectangleModeEXT::eExclusive: return "Exclusive"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(SubpassDescriptionFlagBits value) + { + switch (value) + { + case SubpassDescriptionFlagBits::ePerViewAttributesNVX: return "PerViewAttributesNVX"; + case SubpassDescriptionFlagBits::ePerViewPositionXOnlyNVX: return "PerViewPositionXOnlyNVX"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(SubpassDescriptionFlags value) + { + if (!value) return "{}"; + std::string result; + if (value & SubpassDescriptionFlagBits::ePerViewAttributesNVX) result += "PerViewAttributesNVX | "; + if (value & SubpassDescriptionFlagBits::ePerViewPositionXOnlyNVX) result += "PerViewPositionXOnlyNVX | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(PointClippingBehavior value) + { + switch (value) + { + case PointClippingBehavior::eAllClipPlanes: return "AllClipPlanes"; + case PointClippingBehavior::eUserClipPlanesOnly: return "UserClipPlanesOnly"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(SamplerReductionModeEXT value) + { + switch (value) + { + case SamplerReductionModeEXT::eWeightedAverage: return "WeightedAverage"; + case SamplerReductionModeEXT::eMin: return "Min"; + case SamplerReductionModeEXT::eMax: return "Max"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(TessellationDomainOrigin value) + { + switch (value) + { + case TessellationDomainOrigin::eUpperLeft: return "UpperLeft"; + case TessellationDomainOrigin::eLowerLeft: return "LowerLeft"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(SamplerYcbcrModelConversion value) + { + switch (value) + { + case SamplerYcbcrModelConversion::eRgbIdentity: return "RgbIdentity"; + case SamplerYcbcrModelConversion::eYcbcrIdentity: return "YcbcrIdentity"; + case SamplerYcbcrModelConversion::eYcbcr709: return "Ycbcr709"; + case SamplerYcbcrModelConversion::eYcbcr601: return "Ycbcr601"; + case SamplerYcbcrModelConversion::eYcbcr2020: return "Ycbcr2020"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(SamplerYcbcrRange value) + { + switch (value) + { + case SamplerYcbcrRange::eItuFull: return "ItuFull"; + case 
SamplerYcbcrRange::eItuNarrow: return "ItuNarrow"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(ChromaLocation value) + { + switch (value) + { + case ChromaLocation::eCositedEven: return "CositedEven"; + case ChromaLocation::eMidpoint: return "Midpoint"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(BlendOverlapEXT value) + { + switch (value) + { + case BlendOverlapEXT::eUncorrelated: return "Uncorrelated"; + case BlendOverlapEXT::eDisjoint: return "Disjoint"; + case BlendOverlapEXT::eConjoint: return "Conjoint"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(CoverageModulationModeNV value) + { + switch (value) + { + case CoverageModulationModeNV::eNone: return "None"; + case CoverageModulationModeNV::eRgb: return "Rgb"; + case CoverageModulationModeNV::eAlpha: return "Alpha"; + case CoverageModulationModeNV::eRgba: return "Rgba"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(ValidationCacheHeaderVersionEXT value) + { + switch (value) + { + case ValidationCacheHeaderVersionEXT::eOne: return "One"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(ShaderInfoTypeAMD value) + { + switch (value) + { + case ShaderInfoTypeAMD::eStatistics: return "Statistics"; + case ShaderInfoTypeAMD::eBinary: return "Binary"; + case ShaderInfoTypeAMD::eDisassembly: return "Disassembly"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(QueueGlobalPriorityEXT value) + { + switch (value) + { + case QueueGlobalPriorityEXT::eLow: return "Low"; + case QueueGlobalPriorityEXT::eMedium: return "Medium"; + case QueueGlobalPriorityEXT::eHigh: return "High"; + case QueueGlobalPriorityEXT::eRealtime: return "Realtime"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(DebugUtilsMessageSeverityFlagBitsEXT value) + { + switch (value) + { + case DebugUtilsMessageSeverityFlagBitsEXT::eVerbose: return "Verbose"; + case DebugUtilsMessageSeverityFlagBitsEXT::eInfo: return "Info"; + case DebugUtilsMessageSeverityFlagBitsEXT::eWarning: return "Warning"; + case DebugUtilsMessageSeverityFlagBitsEXT::eError: return "Error"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(DebugUtilsMessageSeverityFlagsEXT value) + { + if (!value) return "{}"; + std::string result; + if (value & DebugUtilsMessageSeverityFlagBitsEXT::eVerbose) result += "Verbose | "; + if (value & DebugUtilsMessageSeverityFlagBitsEXT::eInfo) result += "Info | "; + if (value & DebugUtilsMessageSeverityFlagBitsEXT::eWarning) result += "Warning | "; + if (value & DebugUtilsMessageSeverityFlagBitsEXT::eError) result += "Error | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(DebugUtilsMessageTypeFlagBitsEXT value) + { + switch (value) + { + case DebugUtilsMessageTypeFlagBitsEXT::eGeneral: return "General"; + case DebugUtilsMessageTypeFlagBitsEXT::eValidation: return "Validation"; + case DebugUtilsMessageTypeFlagBitsEXT::ePerformance: return "Performance"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(DebugUtilsMessageTypeFlagsEXT value) + { + if (!value) return "{}"; + std::string result; + if (value & DebugUtilsMessageTypeFlagBitsEXT::eGeneral) result += "General | "; + if (value & DebugUtilsMessageTypeFlagBitsEXT::eValidation) result += "Validation | "; + if (value & DebugUtilsMessageTypeFlagBitsEXT::ePerformance) 
result += "Performance | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(ConservativeRasterizationModeEXT value) + { + switch (value) + { + case ConservativeRasterizationModeEXT::eDisabled: return "Disabled"; + case ConservativeRasterizationModeEXT::eOverestimate: return "Overestimate"; + case ConservativeRasterizationModeEXT::eUnderestimate: return "Underestimate"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(DescriptorBindingFlagBitsEXT value) + { + switch (value) + { + case DescriptorBindingFlagBitsEXT::eUpdateAfterBind: return "UpdateAfterBind"; + case DescriptorBindingFlagBitsEXT::eUpdateUnusedWhilePending: return "UpdateUnusedWhilePending"; + case DescriptorBindingFlagBitsEXT::ePartiallyBound: return "PartiallyBound"; + case DescriptorBindingFlagBitsEXT::eVariableDescriptorCount: return "VariableDescriptorCount"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(DescriptorBindingFlagsEXT value) + { + if (!value) return "{}"; + std::string result; + if (value & DescriptorBindingFlagBitsEXT::eUpdateAfterBind) result += "UpdateAfterBind | "; + if (value & DescriptorBindingFlagBitsEXT::eUpdateUnusedWhilePending) result += "UpdateUnusedWhilePending | "; + if (value & DescriptorBindingFlagBitsEXT::ePartiallyBound) result += "PartiallyBound | "; + if (value & DescriptorBindingFlagBitsEXT::eVariableDescriptorCount) result += "VariableDescriptorCount | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(VendorId value) + { + switch (value) + { + case VendorId::eViv: return "Viv"; + case VendorId::eVsi: return "Vsi"; + case VendorId::eKazan: return "Kazan"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(DriverIdKHR value) + { + switch (value) + { + case DriverIdKHR::eAmdProprietary: return "AmdProprietary"; + case DriverIdKHR::eAmdOpenSource: return "AmdOpenSource"; + case DriverIdKHR::eMesaRadv: return "MesaRadv"; + case DriverIdKHR::eNvidiaProprietary: return "NvidiaProprietary"; + case DriverIdKHR::eIntelProprietaryWindows: return "IntelProprietaryWindows"; + case DriverIdKHR::eIntelOpenSourceMesa: return "IntelOpenSourceMesa"; + case DriverIdKHR::eImaginationProprietary: return "ImaginationProprietary"; + case DriverIdKHR::eQualcommProprietary: return "QualcommProprietary"; + case DriverIdKHR::eArmProprietary: return "ArmProprietary"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(ConditionalRenderingFlagBitsEXT value) + { + switch (value) + { + case ConditionalRenderingFlagBitsEXT::eInverted: return "Inverted"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(ConditionalRenderingFlagsEXT value) + { + if (!value) return "{}"; + std::string result; + if (value & ConditionalRenderingFlagBitsEXT::eInverted) result += "Inverted | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(ShadingRatePaletteEntryNV value) + { + switch (value) + { + case ShadingRatePaletteEntryNV::eNoInvocations: return "NoInvocations"; + case ShadingRatePaletteEntryNV::e16InvocationsPerPixel: return "16InvocationsPerPixel"; + case ShadingRatePaletteEntryNV::e8InvocationsPerPixel: return "8InvocationsPerPixel"; + case ShadingRatePaletteEntryNV::e4InvocationsPerPixel: return "4InvocationsPerPixel"; + case ShadingRatePaletteEntryNV::e2InvocationsPerPixel: return "2InvocationsPerPixel"; 
+ case ShadingRatePaletteEntryNV::e1InvocationPerPixel: return "1InvocationPerPixel"; + case ShadingRatePaletteEntryNV::e1InvocationPer2X1Pixels: return "1InvocationPer2X1Pixels"; + case ShadingRatePaletteEntryNV::e1InvocationPer1X2Pixels: return "1InvocationPer1X2Pixels"; + case ShadingRatePaletteEntryNV::e1InvocationPer2X2Pixels: return "1InvocationPer2X2Pixels"; + case ShadingRatePaletteEntryNV::e1InvocationPer4X2Pixels: return "1InvocationPer4X2Pixels"; + case ShadingRatePaletteEntryNV::e1InvocationPer2X4Pixels: return "1InvocationPer2X4Pixels"; + case ShadingRatePaletteEntryNV::e1InvocationPer4X4Pixels: return "1InvocationPer4X4Pixels"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(CoarseSampleOrderTypeNV value) + { + switch (value) + { + case CoarseSampleOrderTypeNV::eDefault: return "Default"; + case CoarseSampleOrderTypeNV::eCustom: return "Custom"; + case CoarseSampleOrderTypeNV::ePixelMajor: return "PixelMajor"; + case CoarseSampleOrderTypeNV::eSampleMajor: return "SampleMajor"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(GeometryInstanceFlagBitsNV value) + { + switch (value) + { + case GeometryInstanceFlagBitsNV::eTriangleCullDisable: return "TriangleCullDisable"; + case GeometryInstanceFlagBitsNV::eTriangleFrontCounterclockwise: return "TriangleFrontCounterclockwise"; + case GeometryInstanceFlagBitsNV::eForceOpaque: return "ForceOpaque"; + case GeometryInstanceFlagBitsNV::eForceNoOpaque: return "ForceNoOpaque"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(GeometryInstanceFlagsNV value) + { + if (!value) return "{}"; + std::string result; + if (value & GeometryInstanceFlagBitsNV::eTriangleCullDisable) result += "TriangleCullDisable | "; + if (value & GeometryInstanceFlagBitsNV::eTriangleFrontCounterclockwise) result += "TriangleFrontCounterclockwise | "; + if (value & GeometryInstanceFlagBitsNV::eForceOpaque) result += "ForceOpaque | "; + if (value & GeometryInstanceFlagBitsNV::eForceNoOpaque) result += "ForceNoOpaque | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(GeometryFlagBitsNV value) + { + switch (value) + { + case GeometryFlagBitsNV::eOpaque: return "Opaque"; + case GeometryFlagBitsNV::eNoDuplicateAnyHitInvocation: return "NoDuplicateAnyHitInvocation"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(GeometryFlagsNV value) + { + if (!value) return "{}"; + std::string result; + if (value & GeometryFlagBitsNV::eOpaque) result += "Opaque | "; + if (value & GeometryFlagBitsNV::eNoDuplicateAnyHitInvocation) result += "NoDuplicateAnyHitInvocation | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(BuildAccelerationStructureFlagBitsNV value) + { + switch (value) + { + case BuildAccelerationStructureFlagBitsNV::eAllowUpdate: return "AllowUpdate"; + case BuildAccelerationStructureFlagBitsNV::eAllowCompaction: return "AllowCompaction"; + case BuildAccelerationStructureFlagBitsNV::ePreferFastTrace: return "PreferFastTrace"; + case BuildAccelerationStructureFlagBitsNV::ePreferFastBuild: return "PreferFastBuild"; + case BuildAccelerationStructureFlagBitsNV::eLowMemory: return "LowMemory"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(BuildAccelerationStructureFlagsNV value) + { + if (!value) return "{}"; + std::string result; + if (value & BuildAccelerationStructureFlagBitsNV::eAllowUpdate) 
result += "AllowUpdate | "; + if (value & BuildAccelerationStructureFlagBitsNV::eAllowCompaction) result += "AllowCompaction | "; + if (value & BuildAccelerationStructureFlagBitsNV::ePreferFastTrace) result += "PreferFastTrace | "; + if (value & BuildAccelerationStructureFlagBitsNV::ePreferFastBuild) result += "PreferFastBuild | "; + if (value & BuildAccelerationStructureFlagBitsNV::eLowMemory) result += "LowMemory | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(CopyAccelerationStructureModeNV value) + { + switch (value) + { + case CopyAccelerationStructureModeNV::eClone: return "Clone"; + case CopyAccelerationStructureModeNV::eCompact: return "Compact"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(AccelerationStructureTypeNV value) + { + switch (value) + { + case AccelerationStructureTypeNV::eTopLevel: return "TopLevel"; + case AccelerationStructureTypeNV::eBottomLevel: return "BottomLevel"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(GeometryTypeNV value) + { + switch (value) + { + case GeometryTypeNV::eTriangles: return "Triangles"; + case GeometryTypeNV::eAabbs: return "Aabbs"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(AccelerationStructureMemoryRequirementsTypeNV value) + { + switch (value) + { + case AccelerationStructureMemoryRequirementsTypeNV::eObject: return "Object"; + case AccelerationStructureMemoryRequirementsTypeNV::eBuildScratch: return "BuildScratch"; + case AccelerationStructureMemoryRequirementsTypeNV::eUpdateScratch: return "UpdateScratch"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(RayTracingShaderGroupTypeNV value) + { + switch (value) + { + case RayTracingShaderGroupTypeNV::eGeneral: return "General"; + case RayTracingShaderGroupTypeNV::eTrianglesHitGroup: return "TrianglesHitGroup"; + case RayTracingShaderGroupTypeNV::eProceduralHitGroup: return "ProceduralHitGroup"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(MemoryOverallocationBehaviorAMD value) + { + switch (value) + { + case MemoryOverallocationBehaviorAMD::eDefault: return "Default"; + case MemoryOverallocationBehaviorAMD::eAllowed: return "Allowed"; + case MemoryOverallocationBehaviorAMD::eDisallowed: return "Disallowed"; + default: return "invalid"; + } + } + + class DispatchLoaderDynamic + { + public: + PFN_vkAcquireNextImage2KHR vkAcquireNextImage2KHR = 0; + PFN_vkAcquireNextImageKHR vkAcquireNextImageKHR = 0; +#ifdef VK_USE_PLATFORM_XLIB_XRANDR_NV + PFN_vkAcquireXlibDisplayEXT vkAcquireXlibDisplayEXT = 0; +#endif /*VK_USE_PLATFORM_XLIB_XRANDR_NV*/ + PFN_vkAllocateCommandBuffers vkAllocateCommandBuffers = 0; + PFN_vkAllocateDescriptorSets vkAllocateDescriptorSets = 0; + PFN_vkAllocateMemory vkAllocateMemory = 0; + PFN_vkBeginCommandBuffer vkBeginCommandBuffer = 0; + PFN_vkBindAccelerationStructureMemoryNV vkBindAccelerationStructureMemoryNV = 0; + PFN_vkBindBufferMemory vkBindBufferMemory = 0; + PFN_vkBindBufferMemory2 vkBindBufferMemory2 = 0; + PFN_vkBindBufferMemory2KHR vkBindBufferMemory2KHR = 0; + PFN_vkBindImageMemory vkBindImageMemory = 0; + PFN_vkBindImageMemory2 vkBindImageMemory2 = 0; + PFN_vkBindImageMemory2KHR vkBindImageMemory2KHR = 0; + PFN_vkCmdBeginConditionalRenderingEXT vkCmdBeginConditionalRenderingEXT = 0; + PFN_vkCmdBeginDebugUtilsLabelEXT vkCmdBeginDebugUtilsLabelEXT = 0; + PFN_vkCmdBeginQuery vkCmdBeginQuery = 0; + 
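// Reviewer annotation, not part of the upstream Khronos header: every PFN_* member of this
+ // DispatchLoaderDynamic table is a dynamically resolved Vulkan entry point, zero-initialized here
+ // and filled in by init() below via vkGetDeviceProcAddr (when a Device is supplied) or
+ // vkGetInstanceProcAddr otherwise. +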
PFN_vkCmdBeginQueryIndexedEXT vkCmdBeginQueryIndexedEXT = 0; + PFN_vkCmdBeginRenderPass vkCmdBeginRenderPass = 0; + PFN_vkCmdBeginRenderPass2KHR vkCmdBeginRenderPass2KHR = 0; + PFN_vkCmdBeginTransformFeedbackEXT vkCmdBeginTransformFeedbackEXT = 0; + PFN_vkCmdBindDescriptorSets vkCmdBindDescriptorSets = 0; + PFN_vkCmdBindIndexBuffer vkCmdBindIndexBuffer = 0; + PFN_vkCmdBindPipeline vkCmdBindPipeline = 0; + PFN_vkCmdBindShadingRateImageNV vkCmdBindShadingRateImageNV = 0; + PFN_vkCmdBindTransformFeedbackBuffersEXT vkCmdBindTransformFeedbackBuffersEXT = 0; + PFN_vkCmdBindVertexBuffers vkCmdBindVertexBuffers = 0; + PFN_vkCmdBlitImage vkCmdBlitImage = 0; + PFN_vkCmdBuildAccelerationStructureNV vkCmdBuildAccelerationStructureNV = 0; + PFN_vkCmdClearAttachments vkCmdClearAttachments = 0; + PFN_vkCmdClearColorImage vkCmdClearColorImage = 0; + PFN_vkCmdClearDepthStencilImage vkCmdClearDepthStencilImage = 0; + PFN_vkCmdCopyAccelerationStructureNV vkCmdCopyAccelerationStructureNV = 0; + PFN_vkCmdCopyBuffer vkCmdCopyBuffer = 0; + PFN_vkCmdCopyBufferToImage vkCmdCopyBufferToImage = 0; + PFN_vkCmdCopyImage vkCmdCopyImage = 0; + PFN_vkCmdCopyImageToBuffer vkCmdCopyImageToBuffer = 0; + PFN_vkCmdCopyQueryPoolResults vkCmdCopyQueryPoolResults = 0; + PFN_vkCmdDebugMarkerBeginEXT vkCmdDebugMarkerBeginEXT = 0; + PFN_vkCmdDebugMarkerEndEXT vkCmdDebugMarkerEndEXT = 0; + PFN_vkCmdDebugMarkerInsertEXT vkCmdDebugMarkerInsertEXT = 0; + PFN_vkCmdDispatch vkCmdDispatch = 0; + PFN_vkCmdDispatchBase vkCmdDispatchBase = 0; + PFN_vkCmdDispatchBaseKHR vkCmdDispatchBaseKHR = 0; + PFN_vkCmdDispatchIndirect vkCmdDispatchIndirect = 0; + PFN_vkCmdDraw vkCmdDraw = 0; + PFN_vkCmdDrawIndexed vkCmdDrawIndexed = 0; + PFN_vkCmdDrawIndexedIndirect vkCmdDrawIndexedIndirect = 0; + PFN_vkCmdDrawIndexedIndirectCountAMD vkCmdDrawIndexedIndirectCountAMD = 0; + PFN_vkCmdDrawIndexedIndirectCountKHR vkCmdDrawIndexedIndirectCountKHR = 0; + PFN_vkCmdDrawIndirect vkCmdDrawIndirect = 0; + PFN_vkCmdDrawIndirectByteCountEXT vkCmdDrawIndirectByteCountEXT = 0; + PFN_vkCmdDrawIndirectCountAMD vkCmdDrawIndirectCountAMD = 0; + PFN_vkCmdDrawIndirectCountKHR vkCmdDrawIndirectCountKHR = 0; + PFN_vkCmdDrawMeshTasksIndirectCountNV vkCmdDrawMeshTasksIndirectCountNV = 0; + PFN_vkCmdDrawMeshTasksIndirectNV vkCmdDrawMeshTasksIndirectNV = 0; + PFN_vkCmdDrawMeshTasksNV vkCmdDrawMeshTasksNV = 0; + PFN_vkCmdEndConditionalRenderingEXT vkCmdEndConditionalRenderingEXT = 0; + PFN_vkCmdEndDebugUtilsLabelEXT vkCmdEndDebugUtilsLabelEXT = 0; + PFN_vkCmdEndQuery vkCmdEndQuery = 0; + PFN_vkCmdEndQueryIndexedEXT vkCmdEndQueryIndexedEXT = 0; + PFN_vkCmdEndRenderPass vkCmdEndRenderPass = 0; + PFN_vkCmdEndRenderPass2KHR vkCmdEndRenderPass2KHR = 0; + PFN_vkCmdEndTransformFeedbackEXT vkCmdEndTransformFeedbackEXT = 0; + PFN_vkCmdExecuteCommands vkCmdExecuteCommands = 0; + PFN_vkCmdFillBuffer vkCmdFillBuffer = 0; + PFN_vkCmdInsertDebugUtilsLabelEXT vkCmdInsertDebugUtilsLabelEXT = 0; + PFN_vkCmdNextSubpass vkCmdNextSubpass = 0; + PFN_vkCmdNextSubpass2KHR vkCmdNextSubpass2KHR = 0; + PFN_vkCmdPipelineBarrier vkCmdPipelineBarrier = 0; + PFN_vkCmdProcessCommandsNVX vkCmdProcessCommandsNVX = 0; + PFN_vkCmdPushConstants vkCmdPushConstants = 0; + PFN_vkCmdPushDescriptorSetKHR vkCmdPushDescriptorSetKHR = 0; + PFN_vkCmdPushDescriptorSetWithTemplateKHR vkCmdPushDescriptorSetWithTemplateKHR = 0; + PFN_vkCmdReserveSpaceForCommandsNVX vkCmdReserveSpaceForCommandsNVX = 0; + PFN_vkCmdResetEvent vkCmdResetEvent = 0; + PFN_vkCmdResetQueryPool vkCmdResetQueryPool = 0; + PFN_vkCmdResolveImage 
vkCmdResolveImage = 0; + PFN_vkCmdSetBlendConstants vkCmdSetBlendConstants = 0; + PFN_vkCmdSetCheckpointNV vkCmdSetCheckpointNV = 0; + PFN_vkCmdSetCoarseSampleOrderNV vkCmdSetCoarseSampleOrderNV = 0; + PFN_vkCmdSetDepthBias vkCmdSetDepthBias = 0; + PFN_vkCmdSetDepthBounds vkCmdSetDepthBounds = 0; + PFN_vkCmdSetDeviceMask vkCmdSetDeviceMask = 0; + PFN_vkCmdSetDeviceMaskKHR vkCmdSetDeviceMaskKHR = 0; + PFN_vkCmdSetDiscardRectangleEXT vkCmdSetDiscardRectangleEXT = 0; + PFN_vkCmdSetEvent vkCmdSetEvent = 0; + PFN_vkCmdSetExclusiveScissorNV vkCmdSetExclusiveScissorNV = 0; + PFN_vkCmdSetLineWidth vkCmdSetLineWidth = 0; + PFN_vkCmdSetSampleLocationsEXT vkCmdSetSampleLocationsEXT = 0; + PFN_vkCmdSetScissor vkCmdSetScissor = 0; + PFN_vkCmdSetStencilCompareMask vkCmdSetStencilCompareMask = 0; + PFN_vkCmdSetStencilReference vkCmdSetStencilReference = 0; + PFN_vkCmdSetStencilWriteMask vkCmdSetStencilWriteMask = 0; + PFN_vkCmdSetViewport vkCmdSetViewport = 0; + PFN_vkCmdSetViewportShadingRatePaletteNV vkCmdSetViewportShadingRatePaletteNV = 0; + PFN_vkCmdSetViewportWScalingNV vkCmdSetViewportWScalingNV = 0; + PFN_vkCmdTraceRaysNV vkCmdTraceRaysNV = 0; + PFN_vkCmdUpdateBuffer vkCmdUpdateBuffer = 0; + PFN_vkCmdWaitEvents vkCmdWaitEvents = 0; + PFN_vkCmdWriteAccelerationStructuresPropertiesNV vkCmdWriteAccelerationStructuresPropertiesNV = 0; + PFN_vkCmdWriteBufferMarkerAMD vkCmdWriteBufferMarkerAMD = 0; + PFN_vkCmdWriteTimestamp vkCmdWriteTimestamp = 0; + PFN_vkCompileDeferredNV vkCompileDeferredNV = 0; + PFN_vkCreateAccelerationStructureNV vkCreateAccelerationStructureNV = 0; +#ifdef VK_USE_PLATFORM_ANDROID_KHR + PFN_vkCreateAndroidSurfaceKHR vkCreateAndroidSurfaceKHR = 0; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + PFN_vkCreateBuffer vkCreateBuffer = 0; + PFN_vkCreateBufferView vkCreateBufferView = 0; + PFN_vkCreateCommandPool vkCreateCommandPool = 0; + PFN_vkCreateComputePipelines vkCreateComputePipelines = 0; + PFN_vkCreateDebugReportCallbackEXT vkCreateDebugReportCallbackEXT = 0; + PFN_vkCreateDebugUtilsMessengerEXT vkCreateDebugUtilsMessengerEXT = 0; + PFN_vkCreateDescriptorPool vkCreateDescriptorPool = 0; + PFN_vkCreateDescriptorSetLayout vkCreateDescriptorSetLayout = 0; + PFN_vkCreateDescriptorUpdateTemplate vkCreateDescriptorUpdateTemplate = 0; + PFN_vkCreateDescriptorUpdateTemplateKHR vkCreateDescriptorUpdateTemplateKHR = 0; + PFN_vkCreateDevice vkCreateDevice = 0; + PFN_vkCreateDisplayModeKHR vkCreateDisplayModeKHR = 0; + PFN_vkCreateDisplayPlaneSurfaceKHR vkCreateDisplayPlaneSurfaceKHR = 0; + PFN_vkCreateEvent vkCreateEvent = 0; + PFN_vkCreateFence vkCreateFence = 0; + PFN_vkCreateFramebuffer vkCreateFramebuffer = 0; + PFN_vkCreateGraphicsPipelines vkCreateGraphicsPipelines = 0; +#ifdef VK_USE_PLATFORM_IOS_MVK + PFN_vkCreateIOSSurfaceMVK vkCreateIOSSurfaceMVK = 0; +#endif /*VK_USE_PLATFORM_IOS_MVK*/ + PFN_vkCreateImage vkCreateImage = 0; +#ifdef VK_USE_PLATFORM_FUCHSIA_FUCHSIA + PFN_vkCreateImagePipeSurfaceFUCHSIA vkCreateImagePipeSurfaceFUCHSIA = 0; +#endif /*VK_USE_PLATFORM_FUCHSIA_FUCHSIA*/ + PFN_vkCreateImageView vkCreateImageView = 0; + PFN_vkCreateIndirectCommandsLayoutNVX vkCreateIndirectCommandsLayoutNVX = 0; + PFN_vkCreateInstance vkCreateInstance = 0; +#ifdef VK_USE_PLATFORM_MACOS_MVK + PFN_vkCreateMacOSSurfaceMVK vkCreateMacOSSurfaceMVK = 0; +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ + PFN_vkCreateObjectTableNVX vkCreateObjectTableNVX = 0; + PFN_vkCreatePipelineCache vkCreatePipelineCache = 0; + PFN_vkCreatePipelineLayout vkCreatePipelineLayout = 0; + PFN_vkCreateQueryPool vkCreateQueryPool 
= 0; + PFN_vkCreateRayTracingPipelinesNV vkCreateRayTracingPipelinesNV = 0; + PFN_vkCreateRenderPass vkCreateRenderPass = 0; + PFN_vkCreateRenderPass2KHR vkCreateRenderPass2KHR = 0; + PFN_vkCreateSampler vkCreateSampler = 0; + PFN_vkCreateSamplerYcbcrConversion vkCreateSamplerYcbcrConversion = 0; + PFN_vkCreateSamplerYcbcrConversionKHR vkCreateSamplerYcbcrConversionKHR = 0; + PFN_vkCreateSemaphore vkCreateSemaphore = 0; + PFN_vkCreateShaderModule vkCreateShaderModule = 0; + PFN_vkCreateSharedSwapchainsKHR vkCreateSharedSwapchainsKHR = 0; + PFN_vkCreateSwapchainKHR vkCreateSwapchainKHR = 0; + PFN_vkCreateValidationCacheEXT vkCreateValidationCacheEXT = 0; +#ifdef VK_USE_PLATFORM_VI_NN + PFN_vkCreateViSurfaceNN vkCreateViSurfaceNN = 0; +#endif /*VK_USE_PLATFORM_VI_NN*/ +#ifdef VK_USE_PLATFORM_WAYLAND_KHR + PFN_vkCreateWaylandSurfaceKHR vkCreateWaylandSurfaceKHR = 0; +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ +#ifdef VK_USE_PLATFORM_WIN32_KHR + PFN_vkCreateWin32SurfaceKHR vkCreateWin32SurfaceKHR = 0; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +#ifdef VK_USE_PLATFORM_XCB_KHR + PFN_vkCreateXcbSurfaceKHR vkCreateXcbSurfaceKHR = 0; +#endif /*VK_USE_PLATFORM_XCB_KHR*/ +#ifdef VK_USE_PLATFORM_XLIB_KHR + PFN_vkCreateXlibSurfaceKHR vkCreateXlibSurfaceKHR = 0; +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ + PFN_vkDebugMarkerSetObjectNameEXT vkDebugMarkerSetObjectNameEXT = 0; + PFN_vkDebugMarkerSetObjectTagEXT vkDebugMarkerSetObjectTagEXT = 0; + PFN_vkDebugReportMessageEXT vkDebugReportMessageEXT = 0; + PFN_vkDestroyAccelerationStructureNV vkDestroyAccelerationStructureNV = 0; + PFN_vkDestroyBuffer vkDestroyBuffer = 0; + PFN_vkDestroyBufferView vkDestroyBufferView = 0; + PFN_vkDestroyCommandPool vkDestroyCommandPool = 0; + PFN_vkDestroyDebugReportCallbackEXT vkDestroyDebugReportCallbackEXT = 0; + PFN_vkDestroyDebugUtilsMessengerEXT vkDestroyDebugUtilsMessengerEXT = 0; + PFN_vkDestroyDescriptorPool vkDestroyDescriptorPool = 0; + PFN_vkDestroyDescriptorSetLayout vkDestroyDescriptorSetLayout = 0; + PFN_vkDestroyDescriptorUpdateTemplate vkDestroyDescriptorUpdateTemplate = 0; + PFN_vkDestroyDescriptorUpdateTemplateKHR vkDestroyDescriptorUpdateTemplateKHR = 0; + PFN_vkDestroyDevice vkDestroyDevice = 0; + PFN_vkDestroyEvent vkDestroyEvent = 0; + PFN_vkDestroyFence vkDestroyFence = 0; + PFN_vkDestroyFramebuffer vkDestroyFramebuffer = 0; + PFN_vkDestroyImage vkDestroyImage = 0; + PFN_vkDestroyImageView vkDestroyImageView = 0; + PFN_vkDestroyIndirectCommandsLayoutNVX vkDestroyIndirectCommandsLayoutNVX = 0; + PFN_vkDestroyInstance vkDestroyInstance = 0; + PFN_vkDestroyObjectTableNVX vkDestroyObjectTableNVX = 0; + PFN_vkDestroyPipeline vkDestroyPipeline = 0; + PFN_vkDestroyPipelineCache vkDestroyPipelineCache = 0; + PFN_vkDestroyPipelineLayout vkDestroyPipelineLayout = 0; + PFN_vkDestroyQueryPool vkDestroyQueryPool = 0; + PFN_vkDestroyRenderPass vkDestroyRenderPass = 0; + PFN_vkDestroySampler vkDestroySampler = 0; + PFN_vkDestroySamplerYcbcrConversion vkDestroySamplerYcbcrConversion = 0; + PFN_vkDestroySamplerYcbcrConversionKHR vkDestroySamplerYcbcrConversionKHR = 0; + PFN_vkDestroySemaphore vkDestroySemaphore = 0; + PFN_vkDestroyShaderModule vkDestroyShaderModule = 0; + PFN_vkDestroySurfaceKHR vkDestroySurfaceKHR = 0; + PFN_vkDestroySwapchainKHR vkDestroySwapchainKHR = 0; + PFN_vkDestroyValidationCacheEXT vkDestroyValidationCacheEXT = 0; + PFN_vkDeviceWaitIdle vkDeviceWaitIdle = 0; + PFN_vkDisplayPowerControlEXT vkDisplayPowerControlEXT = 0; + PFN_vkEndCommandBuffer vkEndCommandBuffer = 0; + 
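// Reviewer annotation, not from the upstream header: platform-specific WSI and external-handle
+ // entry points in this table sit behind #ifdef VK_USE_PLATFORM_* guards, so only the loaders
+ // compiled in for the target platform become members. +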
PFN_vkEnumerateDeviceExtensionProperties vkEnumerateDeviceExtensionProperties = 0; + PFN_vkEnumerateDeviceLayerProperties vkEnumerateDeviceLayerProperties = 0; + PFN_vkEnumerateInstanceExtensionProperties vkEnumerateInstanceExtensionProperties = 0; + PFN_vkEnumerateInstanceLayerProperties vkEnumerateInstanceLayerProperties = 0; + PFN_vkEnumerateInstanceVersion vkEnumerateInstanceVersion = 0; + PFN_vkEnumeratePhysicalDeviceGroups vkEnumeratePhysicalDeviceGroups = 0; + PFN_vkEnumeratePhysicalDeviceGroupsKHR vkEnumeratePhysicalDeviceGroupsKHR = 0; + PFN_vkEnumeratePhysicalDevices vkEnumeratePhysicalDevices = 0; + PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges = 0; + PFN_vkFreeCommandBuffers vkFreeCommandBuffers = 0; + PFN_vkFreeDescriptorSets vkFreeDescriptorSets = 0; + PFN_vkFreeMemory vkFreeMemory = 0; + PFN_vkGetAccelerationStructureHandleNV vkGetAccelerationStructureHandleNV = 0; + PFN_vkGetAccelerationStructureMemoryRequirementsNV vkGetAccelerationStructureMemoryRequirementsNV = 0; +#ifdef VK_USE_PLATFORM_ANDROID_ANDROID + PFN_vkGetAndroidHardwareBufferPropertiesANDROID vkGetAndroidHardwareBufferPropertiesANDROID = 0; +#endif /*VK_USE_PLATFORM_ANDROID_ANDROID*/ + PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements = 0; + PFN_vkGetBufferMemoryRequirements2 vkGetBufferMemoryRequirements2 = 0; + PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR = 0; + PFN_vkGetCalibratedTimestampsEXT vkGetCalibratedTimestampsEXT = 0; + PFN_vkGetDescriptorSetLayoutSupport vkGetDescriptorSetLayoutSupport = 0; + PFN_vkGetDescriptorSetLayoutSupportKHR vkGetDescriptorSetLayoutSupportKHR = 0; + PFN_vkGetDeviceGroupPeerMemoryFeatures vkGetDeviceGroupPeerMemoryFeatures = 0; + PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR vkGetDeviceGroupPeerMemoryFeaturesKHR = 0; + PFN_vkGetDeviceGroupPresentCapabilitiesKHR vkGetDeviceGroupPresentCapabilitiesKHR = 0; + PFN_vkGetDeviceGroupSurfacePresentModesKHR vkGetDeviceGroupSurfacePresentModesKHR = 0; + PFN_vkGetDeviceMemoryCommitment vkGetDeviceMemoryCommitment = 0; + PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr = 0; + PFN_vkGetDeviceQueue vkGetDeviceQueue = 0; + PFN_vkGetDeviceQueue2 vkGetDeviceQueue2 = 0; + PFN_vkGetDisplayModeProperties2KHR vkGetDisplayModeProperties2KHR = 0; + PFN_vkGetDisplayModePropertiesKHR vkGetDisplayModePropertiesKHR = 0; + PFN_vkGetDisplayPlaneCapabilities2KHR vkGetDisplayPlaneCapabilities2KHR = 0; + PFN_vkGetDisplayPlaneCapabilitiesKHR vkGetDisplayPlaneCapabilitiesKHR = 0; + PFN_vkGetDisplayPlaneSupportedDisplaysKHR vkGetDisplayPlaneSupportedDisplaysKHR = 0; + PFN_vkGetEventStatus vkGetEventStatus = 0; + PFN_vkGetFenceFdKHR vkGetFenceFdKHR = 0; + PFN_vkGetFenceStatus vkGetFenceStatus = 0; +#ifdef VK_USE_PLATFORM_WIN32_KHR + PFN_vkGetFenceWin32HandleKHR vkGetFenceWin32HandleKHR = 0; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + PFN_vkGetImageDrmFormatModifierPropertiesEXT vkGetImageDrmFormatModifierPropertiesEXT = 0; + PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements = 0; + PFN_vkGetImageMemoryRequirements2 vkGetImageMemoryRequirements2 = 0; + PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR = 0; + PFN_vkGetImageSparseMemoryRequirements vkGetImageSparseMemoryRequirements = 0; + PFN_vkGetImageSparseMemoryRequirements2 vkGetImageSparseMemoryRequirements2 = 0; + PFN_vkGetImageSparseMemoryRequirements2KHR vkGetImageSparseMemoryRequirements2KHR = 0; + PFN_vkGetImageSubresourceLayout vkGetImageSubresourceLayout = 0; + PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr = 0; +#ifdef 
VK_USE_PLATFORM_ANDROID_ANDROID + PFN_vkGetMemoryAndroidHardwareBufferANDROID vkGetMemoryAndroidHardwareBufferANDROID = 0; +#endif /*VK_USE_PLATFORM_ANDROID_ANDROID*/ + PFN_vkGetMemoryFdKHR vkGetMemoryFdKHR = 0; + PFN_vkGetMemoryFdPropertiesKHR vkGetMemoryFdPropertiesKHR = 0; + PFN_vkGetMemoryHostPointerPropertiesEXT vkGetMemoryHostPointerPropertiesEXT = 0; +#ifdef VK_USE_PLATFORM_WIN32_KHR + PFN_vkGetMemoryWin32HandleKHR vkGetMemoryWin32HandleKHR = 0; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +#ifdef VK_USE_PLATFORM_WIN32_NV + PFN_vkGetMemoryWin32HandleNV vkGetMemoryWin32HandleNV = 0; +#endif /*VK_USE_PLATFORM_WIN32_NV*/ +#ifdef VK_USE_PLATFORM_WIN32_KHR + PFN_vkGetMemoryWin32HandlePropertiesKHR vkGetMemoryWin32HandlePropertiesKHR = 0; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + PFN_vkGetPastPresentationTimingGOOGLE vkGetPastPresentationTimingGOOGLE = 0; + PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT vkGetPhysicalDeviceCalibrateableTimeDomainsEXT = 0; + PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR vkGetPhysicalDeviceDisplayPlaneProperties2KHR = 0; + PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR vkGetPhysicalDeviceDisplayPlanePropertiesKHR = 0; + PFN_vkGetPhysicalDeviceDisplayProperties2KHR vkGetPhysicalDeviceDisplayProperties2KHR = 0; + PFN_vkGetPhysicalDeviceDisplayPropertiesKHR vkGetPhysicalDeviceDisplayPropertiesKHR = 0; + PFN_vkGetPhysicalDeviceExternalBufferProperties vkGetPhysicalDeviceExternalBufferProperties = 0; + PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR vkGetPhysicalDeviceExternalBufferPropertiesKHR = 0; + PFN_vkGetPhysicalDeviceExternalFenceProperties vkGetPhysicalDeviceExternalFenceProperties = 0; + PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR vkGetPhysicalDeviceExternalFencePropertiesKHR = 0; + PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV vkGetPhysicalDeviceExternalImageFormatPropertiesNV = 0; + PFN_vkGetPhysicalDeviceExternalSemaphoreProperties vkGetPhysicalDeviceExternalSemaphoreProperties = 0; + PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = 0; + PFN_vkGetPhysicalDeviceFeatures vkGetPhysicalDeviceFeatures = 0; + PFN_vkGetPhysicalDeviceFeatures2 vkGetPhysicalDeviceFeatures2 = 0; + PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR = 0; + PFN_vkGetPhysicalDeviceFormatProperties vkGetPhysicalDeviceFormatProperties = 0; + PFN_vkGetPhysicalDeviceFormatProperties2 vkGetPhysicalDeviceFormatProperties2 = 0; + PFN_vkGetPhysicalDeviceFormatProperties2KHR vkGetPhysicalDeviceFormatProperties2KHR = 0; + PFN_vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX = 0; + PFN_vkGetPhysicalDeviceImageFormatProperties vkGetPhysicalDeviceImageFormatProperties = 0; + PFN_vkGetPhysicalDeviceImageFormatProperties2 vkGetPhysicalDeviceImageFormatProperties2 = 0; + PFN_vkGetPhysicalDeviceImageFormatProperties2KHR vkGetPhysicalDeviceImageFormatProperties2KHR = 0; + PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties = 0; + PFN_vkGetPhysicalDeviceMemoryProperties2 vkGetPhysicalDeviceMemoryProperties2 = 0; + PFN_vkGetPhysicalDeviceMemoryProperties2KHR vkGetPhysicalDeviceMemoryProperties2KHR = 0; + PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT vkGetPhysicalDeviceMultisamplePropertiesEXT = 0; + PFN_vkGetPhysicalDevicePresentRectanglesKHR vkGetPhysicalDevicePresentRectanglesKHR = 0; + PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties = 0; + PFN_vkGetPhysicalDeviceProperties2 vkGetPhysicalDeviceProperties2 = 0; + 
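// Illustrative usage sketch (reviewer assumption, not part of the generated header); `instance`
+ // and `device` are placeholders for handles created elsewhere:
+ //   vk::DispatchLoaderDynamic dld(instance, device); // resolves every entry point listed here
+ //   device.waitIdle(dld);                            // vulkan.hpp calls accept the dispatcher
+ // Members the loader/driver does not expose are simply left null by init(). +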
PFN_vkGetPhysicalDeviceProperties2KHR vkGetPhysicalDeviceProperties2KHR = 0; + PFN_vkGetPhysicalDeviceQueueFamilyProperties vkGetPhysicalDeviceQueueFamilyProperties = 0; + PFN_vkGetPhysicalDeviceQueueFamilyProperties2 vkGetPhysicalDeviceQueueFamilyProperties2 = 0; + PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR vkGetPhysicalDeviceQueueFamilyProperties2KHR = 0; + PFN_vkGetPhysicalDeviceSparseImageFormatProperties vkGetPhysicalDeviceSparseImageFormatProperties = 0; + PFN_vkGetPhysicalDeviceSparseImageFormatProperties2 vkGetPhysicalDeviceSparseImageFormatProperties2 = 0; + PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR vkGetPhysicalDeviceSparseImageFormatProperties2KHR = 0; + PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT vkGetPhysicalDeviceSurfaceCapabilities2EXT = 0; + PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR vkGetPhysicalDeviceSurfaceCapabilities2KHR = 0; + PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR vkGetPhysicalDeviceSurfaceCapabilitiesKHR = 0; + PFN_vkGetPhysicalDeviceSurfaceFormats2KHR vkGetPhysicalDeviceSurfaceFormats2KHR = 0; + PFN_vkGetPhysicalDeviceSurfaceFormatsKHR vkGetPhysicalDeviceSurfaceFormatsKHR = 0; + PFN_vkGetPhysicalDeviceSurfacePresentModesKHR vkGetPhysicalDeviceSurfacePresentModesKHR = 0; + PFN_vkGetPhysicalDeviceSurfaceSupportKHR vkGetPhysicalDeviceSurfaceSupportKHR = 0; +#ifdef VK_USE_PLATFORM_WAYLAND_KHR + PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR vkGetPhysicalDeviceWaylandPresentationSupportKHR = 0; +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ +#ifdef VK_USE_PLATFORM_WIN32_KHR + PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR vkGetPhysicalDeviceWin32PresentationSupportKHR = 0; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +#ifdef VK_USE_PLATFORM_XCB_KHR + PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR vkGetPhysicalDeviceXcbPresentationSupportKHR = 0; +#endif /*VK_USE_PLATFORM_XCB_KHR*/ +#ifdef VK_USE_PLATFORM_XLIB_KHR + PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR vkGetPhysicalDeviceXlibPresentationSupportKHR = 0; +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ + PFN_vkGetPipelineCacheData vkGetPipelineCacheData = 0; + PFN_vkGetQueryPoolResults vkGetQueryPoolResults = 0; + PFN_vkGetQueueCheckpointDataNV vkGetQueueCheckpointDataNV = 0; +#ifdef VK_USE_PLATFORM_XLIB_XRANDR_NV + PFN_vkGetRandROutputDisplayEXT vkGetRandROutputDisplayEXT = 0; +#endif /*VK_USE_PLATFORM_XLIB_XRANDR_NV*/ + PFN_vkGetRayTracingShaderGroupHandlesNV vkGetRayTracingShaderGroupHandlesNV = 0; + PFN_vkGetRefreshCycleDurationGOOGLE vkGetRefreshCycleDurationGOOGLE = 0; + PFN_vkGetRenderAreaGranularity vkGetRenderAreaGranularity = 0; + PFN_vkGetSemaphoreFdKHR vkGetSemaphoreFdKHR = 0; +#ifdef VK_USE_PLATFORM_WIN32_KHR + PFN_vkGetSemaphoreWin32HandleKHR vkGetSemaphoreWin32HandleKHR = 0; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + PFN_vkGetShaderInfoAMD vkGetShaderInfoAMD = 0; + PFN_vkGetSwapchainCounterEXT vkGetSwapchainCounterEXT = 0; + PFN_vkGetSwapchainImagesKHR vkGetSwapchainImagesKHR = 0; + PFN_vkGetSwapchainStatusKHR vkGetSwapchainStatusKHR = 0; + PFN_vkGetValidationCacheDataEXT vkGetValidationCacheDataEXT = 0; + PFN_vkImportFenceFdKHR vkImportFenceFdKHR = 0; +#ifdef VK_USE_PLATFORM_WIN32_KHR + PFN_vkImportFenceWin32HandleKHR vkImportFenceWin32HandleKHR = 0; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + PFN_vkImportSemaphoreFdKHR vkImportSemaphoreFdKHR = 0; +#ifdef VK_USE_PLATFORM_WIN32_KHR + PFN_vkImportSemaphoreWin32HandleKHR vkImportSemaphoreWin32HandleKHR = 0; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges = 0; + PFN_vkMapMemory 
vkMapMemory = 0; + PFN_vkMergePipelineCaches vkMergePipelineCaches = 0; + PFN_vkMergeValidationCachesEXT vkMergeValidationCachesEXT = 0; + PFN_vkQueueBeginDebugUtilsLabelEXT vkQueueBeginDebugUtilsLabelEXT = 0; + PFN_vkQueueBindSparse vkQueueBindSparse = 0; + PFN_vkQueueEndDebugUtilsLabelEXT vkQueueEndDebugUtilsLabelEXT = 0; + PFN_vkQueueInsertDebugUtilsLabelEXT vkQueueInsertDebugUtilsLabelEXT = 0; + PFN_vkQueuePresentKHR vkQueuePresentKHR = 0; + PFN_vkQueueSubmit vkQueueSubmit = 0; + PFN_vkQueueWaitIdle vkQueueWaitIdle = 0; + PFN_vkRegisterDeviceEventEXT vkRegisterDeviceEventEXT = 0; + PFN_vkRegisterDisplayEventEXT vkRegisterDisplayEventEXT = 0; + PFN_vkRegisterObjectsNVX vkRegisterObjectsNVX = 0; + PFN_vkReleaseDisplayEXT vkReleaseDisplayEXT = 0; + PFN_vkResetCommandBuffer vkResetCommandBuffer = 0; + PFN_vkResetCommandPool vkResetCommandPool = 0; + PFN_vkResetDescriptorPool vkResetDescriptorPool = 0; + PFN_vkResetEvent vkResetEvent = 0; + PFN_vkResetFences vkResetFences = 0; + PFN_vkSetDebugUtilsObjectNameEXT vkSetDebugUtilsObjectNameEXT = 0; + PFN_vkSetDebugUtilsObjectTagEXT vkSetDebugUtilsObjectTagEXT = 0; + PFN_vkSetEvent vkSetEvent = 0; + PFN_vkSetHdrMetadataEXT vkSetHdrMetadataEXT = 0; + PFN_vkSubmitDebugUtilsMessageEXT vkSubmitDebugUtilsMessageEXT = 0; + PFN_vkTrimCommandPool vkTrimCommandPool = 0; + PFN_vkTrimCommandPoolKHR vkTrimCommandPoolKHR = 0; + PFN_vkUnmapMemory vkUnmapMemory = 0; + PFN_vkUnregisterObjectsNVX vkUnregisterObjectsNVX = 0; + PFN_vkUpdateDescriptorSetWithTemplate vkUpdateDescriptorSetWithTemplate = 0; + PFN_vkUpdateDescriptorSetWithTemplateKHR vkUpdateDescriptorSetWithTemplateKHR = 0; + PFN_vkUpdateDescriptorSets vkUpdateDescriptorSets = 0; + PFN_vkWaitForFences vkWaitForFences = 0; + public: + DispatchLoaderDynamic(Instance instance = Instance(), Device device = Device()) + { + if (instance) + { + init(instance, device); + } + } + + void init(Instance instance, Device device = Device()) + { + vkAcquireNextImage2KHR = PFN_vkAcquireNextImage2KHR(device ? device.getProcAddr( "vkAcquireNextImage2KHR") : instance.getProcAddr( "vkAcquireNextImage2KHR")); + vkAcquireNextImageKHR = PFN_vkAcquireNextImageKHR(device ? device.getProcAddr( "vkAcquireNextImageKHR") : instance.getProcAddr( "vkAcquireNextImageKHR")); +#ifdef VK_USE_PLATFORM_XLIB_XRANDR_NV + vkAcquireXlibDisplayEXT = PFN_vkAcquireXlibDisplayEXT(instance.getProcAddr( "vkAcquireXlibDisplayEXT")); +#endif /*VK_USE_PLATFORM_XLIB_XRANDR_NV*/ + vkAllocateCommandBuffers = PFN_vkAllocateCommandBuffers(device ? device.getProcAddr( "vkAllocateCommandBuffers") : instance.getProcAddr( "vkAllocateCommandBuffers")); + vkAllocateDescriptorSets = PFN_vkAllocateDescriptorSets(device ? device.getProcAddr( "vkAllocateDescriptorSets") : instance.getProcAddr( "vkAllocateDescriptorSets")); + vkAllocateMemory = PFN_vkAllocateMemory(device ? device.getProcAddr( "vkAllocateMemory") : instance.getProcAddr( "vkAllocateMemory")); + vkBeginCommandBuffer = PFN_vkBeginCommandBuffer(device ? device.getProcAddr( "vkBeginCommandBuffer") : instance.getProcAddr( "vkBeginCommandBuffer")); + vkBindAccelerationStructureMemoryNV = PFN_vkBindAccelerationStructureMemoryNV(device ? device.getProcAddr( "vkBindAccelerationStructureMemoryNV") : instance.getProcAddr( "vkBindAccelerationStructureMemoryNV")); + vkBindBufferMemory = PFN_vkBindBufferMemory(device ? device.getProcAddr( "vkBindBufferMemory") : instance.getProcAddr( "vkBindBufferMemory")); + vkBindBufferMemory2 = PFN_vkBindBufferMemory2(device ? 
device.getProcAddr( "vkBindBufferMemory2") : instance.getProcAddr( "vkBindBufferMemory2")); + vkBindBufferMemory2KHR = PFN_vkBindBufferMemory2KHR(device ? device.getProcAddr( "vkBindBufferMemory2KHR") : instance.getProcAddr( "vkBindBufferMemory2KHR")); + vkBindImageMemory = PFN_vkBindImageMemory(device ? device.getProcAddr( "vkBindImageMemory") : instance.getProcAddr( "vkBindImageMemory")); + vkBindImageMemory2 = PFN_vkBindImageMemory2(device ? device.getProcAddr( "vkBindImageMemory2") : instance.getProcAddr( "vkBindImageMemory2")); + vkBindImageMemory2KHR = PFN_vkBindImageMemory2KHR(device ? device.getProcAddr( "vkBindImageMemory2KHR") : instance.getProcAddr( "vkBindImageMemory2KHR")); + vkCmdBeginConditionalRenderingEXT = PFN_vkCmdBeginConditionalRenderingEXT(device ? device.getProcAddr( "vkCmdBeginConditionalRenderingEXT") : instance.getProcAddr( "vkCmdBeginConditionalRenderingEXT")); + vkCmdBeginDebugUtilsLabelEXT = PFN_vkCmdBeginDebugUtilsLabelEXT(device ? device.getProcAddr( "vkCmdBeginDebugUtilsLabelEXT") : instance.getProcAddr( "vkCmdBeginDebugUtilsLabelEXT")); + vkCmdBeginQuery = PFN_vkCmdBeginQuery(device ? device.getProcAddr( "vkCmdBeginQuery") : instance.getProcAddr( "vkCmdBeginQuery")); + vkCmdBeginQueryIndexedEXT = PFN_vkCmdBeginQueryIndexedEXT(device ? device.getProcAddr( "vkCmdBeginQueryIndexedEXT") : instance.getProcAddr( "vkCmdBeginQueryIndexedEXT")); + vkCmdBeginRenderPass = PFN_vkCmdBeginRenderPass(device ? device.getProcAddr( "vkCmdBeginRenderPass") : instance.getProcAddr( "vkCmdBeginRenderPass")); + vkCmdBeginRenderPass2KHR = PFN_vkCmdBeginRenderPass2KHR(device ? device.getProcAddr( "vkCmdBeginRenderPass2KHR") : instance.getProcAddr( "vkCmdBeginRenderPass2KHR")); + vkCmdBeginTransformFeedbackEXT = PFN_vkCmdBeginTransformFeedbackEXT(device ? device.getProcAddr( "vkCmdBeginTransformFeedbackEXT") : instance.getProcAddr( "vkCmdBeginTransformFeedbackEXT")); + vkCmdBindDescriptorSets = PFN_vkCmdBindDescriptorSets(device ? device.getProcAddr( "vkCmdBindDescriptorSets") : instance.getProcAddr( "vkCmdBindDescriptorSets")); + vkCmdBindIndexBuffer = PFN_vkCmdBindIndexBuffer(device ? device.getProcAddr( "vkCmdBindIndexBuffer") : instance.getProcAddr( "vkCmdBindIndexBuffer")); + vkCmdBindPipeline = PFN_vkCmdBindPipeline(device ? device.getProcAddr( "vkCmdBindPipeline") : instance.getProcAddr( "vkCmdBindPipeline")); + vkCmdBindShadingRateImageNV = PFN_vkCmdBindShadingRateImageNV(device ? device.getProcAddr( "vkCmdBindShadingRateImageNV") : instance.getProcAddr( "vkCmdBindShadingRateImageNV")); + vkCmdBindTransformFeedbackBuffersEXT = PFN_vkCmdBindTransformFeedbackBuffersEXT(device ? device.getProcAddr( "vkCmdBindTransformFeedbackBuffersEXT") : instance.getProcAddr( "vkCmdBindTransformFeedbackBuffersEXT")); + vkCmdBindVertexBuffers = PFN_vkCmdBindVertexBuffers(device ? device.getProcAddr( "vkCmdBindVertexBuffers") : instance.getProcAddr( "vkCmdBindVertexBuffers")); + vkCmdBlitImage = PFN_vkCmdBlitImage(device ? device.getProcAddr( "vkCmdBlitImage") : instance.getProcAddr( "vkCmdBlitImage")); + vkCmdBuildAccelerationStructureNV = PFN_vkCmdBuildAccelerationStructureNV(device ? device.getProcAddr( "vkCmdBuildAccelerationStructureNV") : instance.getProcAddr( "vkCmdBuildAccelerationStructureNV")); + vkCmdClearAttachments = PFN_vkCmdClearAttachments(device ? device.getProcAddr( "vkCmdClearAttachments") : instance.getProcAddr( "vkCmdClearAttachments")); + vkCmdClearColorImage = PFN_vkCmdClearColorImage(device ? 
device.getProcAddr( "vkCmdClearColorImage") : instance.getProcAddr( "vkCmdClearColorImage")); + vkCmdClearDepthStencilImage = PFN_vkCmdClearDepthStencilImage(device ? device.getProcAddr( "vkCmdClearDepthStencilImage") : instance.getProcAddr( "vkCmdClearDepthStencilImage")); + vkCmdCopyAccelerationStructureNV = PFN_vkCmdCopyAccelerationStructureNV(device ? device.getProcAddr( "vkCmdCopyAccelerationStructureNV") : instance.getProcAddr( "vkCmdCopyAccelerationStructureNV")); + vkCmdCopyBuffer = PFN_vkCmdCopyBuffer(device ? device.getProcAddr( "vkCmdCopyBuffer") : instance.getProcAddr( "vkCmdCopyBuffer")); + vkCmdCopyBufferToImage = PFN_vkCmdCopyBufferToImage(device ? device.getProcAddr( "vkCmdCopyBufferToImage") : instance.getProcAddr( "vkCmdCopyBufferToImage")); + vkCmdCopyImage = PFN_vkCmdCopyImage(device ? device.getProcAddr( "vkCmdCopyImage") : instance.getProcAddr( "vkCmdCopyImage")); + vkCmdCopyImageToBuffer = PFN_vkCmdCopyImageToBuffer(device ? device.getProcAddr( "vkCmdCopyImageToBuffer") : instance.getProcAddr( "vkCmdCopyImageToBuffer")); + vkCmdCopyQueryPoolResults = PFN_vkCmdCopyQueryPoolResults(device ? device.getProcAddr( "vkCmdCopyQueryPoolResults") : instance.getProcAddr( "vkCmdCopyQueryPoolResults")); + vkCmdDebugMarkerBeginEXT = PFN_vkCmdDebugMarkerBeginEXT(device ? device.getProcAddr( "vkCmdDebugMarkerBeginEXT") : instance.getProcAddr( "vkCmdDebugMarkerBeginEXT")); + vkCmdDebugMarkerEndEXT = PFN_vkCmdDebugMarkerEndEXT(device ? device.getProcAddr( "vkCmdDebugMarkerEndEXT") : instance.getProcAddr( "vkCmdDebugMarkerEndEXT")); + vkCmdDebugMarkerInsertEXT = PFN_vkCmdDebugMarkerInsertEXT(device ? device.getProcAddr( "vkCmdDebugMarkerInsertEXT") : instance.getProcAddr( "vkCmdDebugMarkerInsertEXT")); + vkCmdDispatch = PFN_vkCmdDispatch(device ? device.getProcAddr( "vkCmdDispatch") : instance.getProcAddr( "vkCmdDispatch")); + vkCmdDispatchBase = PFN_vkCmdDispatchBase(device ? device.getProcAddr( "vkCmdDispatchBase") : instance.getProcAddr( "vkCmdDispatchBase")); + vkCmdDispatchBaseKHR = PFN_vkCmdDispatchBaseKHR(device ? device.getProcAddr( "vkCmdDispatchBaseKHR") : instance.getProcAddr( "vkCmdDispatchBaseKHR")); + vkCmdDispatchIndirect = PFN_vkCmdDispatchIndirect(device ? device.getProcAddr( "vkCmdDispatchIndirect") : instance.getProcAddr( "vkCmdDispatchIndirect")); + vkCmdDraw = PFN_vkCmdDraw(device ? device.getProcAddr( "vkCmdDraw") : instance.getProcAddr( "vkCmdDraw")); + vkCmdDrawIndexed = PFN_vkCmdDrawIndexed(device ? device.getProcAddr( "vkCmdDrawIndexed") : instance.getProcAddr( "vkCmdDrawIndexed")); + vkCmdDrawIndexedIndirect = PFN_vkCmdDrawIndexedIndirect(device ? device.getProcAddr( "vkCmdDrawIndexedIndirect") : instance.getProcAddr( "vkCmdDrawIndexedIndirect")); + vkCmdDrawIndexedIndirectCountAMD = PFN_vkCmdDrawIndexedIndirectCountAMD(device ? device.getProcAddr( "vkCmdDrawIndexedIndirectCountAMD") : instance.getProcAddr( "vkCmdDrawIndexedIndirectCountAMD")); + vkCmdDrawIndexedIndirectCountKHR = PFN_vkCmdDrawIndexedIndirectCountKHR(device ? device.getProcAddr( "vkCmdDrawIndexedIndirectCountKHR") : instance.getProcAddr( "vkCmdDrawIndexedIndirectCountKHR")); + vkCmdDrawIndirect = PFN_vkCmdDrawIndirect(device ? device.getProcAddr( "vkCmdDrawIndirect") : instance.getProcAddr( "vkCmdDrawIndirect")); + vkCmdDrawIndirectByteCountEXT = PFN_vkCmdDrawIndirectByteCountEXT(device ? device.getProcAddr( "vkCmdDrawIndirectByteCountEXT") : instance.getProcAddr( "vkCmdDrawIndirectByteCountEXT")); + vkCmdDrawIndirectCountAMD = PFN_vkCmdDrawIndirectCountAMD(device ? 
device.getProcAddr( "vkCmdDrawIndirectCountAMD") : instance.getProcAddr( "vkCmdDrawIndirectCountAMD")); + vkCmdDrawIndirectCountKHR = PFN_vkCmdDrawIndirectCountKHR(device ? device.getProcAddr( "vkCmdDrawIndirectCountKHR") : instance.getProcAddr( "vkCmdDrawIndirectCountKHR")); + vkCmdDrawMeshTasksIndirectCountNV = PFN_vkCmdDrawMeshTasksIndirectCountNV(device ? device.getProcAddr( "vkCmdDrawMeshTasksIndirectCountNV") : instance.getProcAddr( "vkCmdDrawMeshTasksIndirectCountNV")); + vkCmdDrawMeshTasksIndirectNV = PFN_vkCmdDrawMeshTasksIndirectNV(device ? device.getProcAddr( "vkCmdDrawMeshTasksIndirectNV") : instance.getProcAddr( "vkCmdDrawMeshTasksIndirectNV")); + vkCmdDrawMeshTasksNV = PFN_vkCmdDrawMeshTasksNV(device ? device.getProcAddr( "vkCmdDrawMeshTasksNV") : instance.getProcAddr( "vkCmdDrawMeshTasksNV")); + vkCmdEndConditionalRenderingEXT = PFN_vkCmdEndConditionalRenderingEXT(device ? device.getProcAddr( "vkCmdEndConditionalRenderingEXT") : instance.getProcAddr( "vkCmdEndConditionalRenderingEXT")); + vkCmdEndDebugUtilsLabelEXT = PFN_vkCmdEndDebugUtilsLabelEXT(device ? device.getProcAddr( "vkCmdEndDebugUtilsLabelEXT") : instance.getProcAddr( "vkCmdEndDebugUtilsLabelEXT")); + vkCmdEndQuery = PFN_vkCmdEndQuery(device ? device.getProcAddr( "vkCmdEndQuery") : instance.getProcAddr( "vkCmdEndQuery")); + vkCmdEndQueryIndexedEXT = PFN_vkCmdEndQueryIndexedEXT(device ? device.getProcAddr( "vkCmdEndQueryIndexedEXT") : instance.getProcAddr( "vkCmdEndQueryIndexedEXT")); + vkCmdEndRenderPass = PFN_vkCmdEndRenderPass(device ? device.getProcAddr( "vkCmdEndRenderPass") : instance.getProcAddr( "vkCmdEndRenderPass")); + vkCmdEndRenderPass2KHR = PFN_vkCmdEndRenderPass2KHR(device ? device.getProcAddr( "vkCmdEndRenderPass2KHR") : instance.getProcAddr( "vkCmdEndRenderPass2KHR")); + vkCmdEndTransformFeedbackEXT = PFN_vkCmdEndTransformFeedbackEXT(device ? device.getProcAddr( "vkCmdEndTransformFeedbackEXT") : instance.getProcAddr( "vkCmdEndTransformFeedbackEXT")); + vkCmdExecuteCommands = PFN_vkCmdExecuteCommands(device ? device.getProcAddr( "vkCmdExecuteCommands") : instance.getProcAddr( "vkCmdExecuteCommands")); + vkCmdFillBuffer = PFN_vkCmdFillBuffer(device ? device.getProcAddr( "vkCmdFillBuffer") : instance.getProcAddr( "vkCmdFillBuffer")); + vkCmdInsertDebugUtilsLabelEXT = PFN_vkCmdInsertDebugUtilsLabelEXT(device ? device.getProcAddr( "vkCmdInsertDebugUtilsLabelEXT") : instance.getProcAddr( "vkCmdInsertDebugUtilsLabelEXT")); + vkCmdNextSubpass = PFN_vkCmdNextSubpass(device ? device.getProcAddr( "vkCmdNextSubpass") : instance.getProcAddr( "vkCmdNextSubpass")); + vkCmdNextSubpass2KHR = PFN_vkCmdNextSubpass2KHR(device ? device.getProcAddr( "vkCmdNextSubpass2KHR") : instance.getProcAddr( "vkCmdNextSubpass2KHR")); + vkCmdPipelineBarrier = PFN_vkCmdPipelineBarrier(device ? device.getProcAddr( "vkCmdPipelineBarrier") : instance.getProcAddr( "vkCmdPipelineBarrier")); + vkCmdProcessCommandsNVX = PFN_vkCmdProcessCommandsNVX(device ? device.getProcAddr( "vkCmdProcessCommandsNVX") : instance.getProcAddr( "vkCmdProcessCommandsNVX")); + vkCmdPushConstants = PFN_vkCmdPushConstants(device ? device.getProcAddr( "vkCmdPushConstants") : instance.getProcAddr( "vkCmdPushConstants")); + vkCmdPushDescriptorSetKHR = PFN_vkCmdPushDescriptorSetKHR(device ? device.getProcAddr( "vkCmdPushDescriptorSetKHR") : instance.getProcAddr( "vkCmdPushDescriptorSetKHR")); + vkCmdPushDescriptorSetWithTemplateKHR = PFN_vkCmdPushDescriptorSetWithTemplateKHR(device ? 
device.getProcAddr( "vkCmdPushDescriptorSetWithTemplateKHR") : instance.getProcAddr( "vkCmdPushDescriptorSetWithTemplateKHR")); + vkCmdReserveSpaceForCommandsNVX = PFN_vkCmdReserveSpaceForCommandsNVX(device ? device.getProcAddr( "vkCmdReserveSpaceForCommandsNVX") : instance.getProcAddr( "vkCmdReserveSpaceForCommandsNVX")); + vkCmdResetEvent = PFN_vkCmdResetEvent(device ? device.getProcAddr( "vkCmdResetEvent") : instance.getProcAddr( "vkCmdResetEvent")); + vkCmdResetQueryPool = PFN_vkCmdResetQueryPool(device ? device.getProcAddr( "vkCmdResetQueryPool") : instance.getProcAddr( "vkCmdResetQueryPool")); + vkCmdResolveImage = PFN_vkCmdResolveImage(device ? device.getProcAddr( "vkCmdResolveImage") : instance.getProcAddr( "vkCmdResolveImage")); + vkCmdSetBlendConstants = PFN_vkCmdSetBlendConstants(device ? device.getProcAddr( "vkCmdSetBlendConstants") : instance.getProcAddr( "vkCmdSetBlendConstants")); + vkCmdSetCheckpointNV = PFN_vkCmdSetCheckpointNV(device ? device.getProcAddr( "vkCmdSetCheckpointNV") : instance.getProcAddr( "vkCmdSetCheckpointNV")); + vkCmdSetCoarseSampleOrderNV = PFN_vkCmdSetCoarseSampleOrderNV(device ? device.getProcAddr( "vkCmdSetCoarseSampleOrderNV") : instance.getProcAddr( "vkCmdSetCoarseSampleOrderNV")); + vkCmdSetDepthBias = PFN_vkCmdSetDepthBias(device ? device.getProcAddr( "vkCmdSetDepthBias") : instance.getProcAddr( "vkCmdSetDepthBias")); + vkCmdSetDepthBounds = PFN_vkCmdSetDepthBounds(device ? device.getProcAddr( "vkCmdSetDepthBounds") : instance.getProcAddr( "vkCmdSetDepthBounds")); + vkCmdSetDeviceMask = PFN_vkCmdSetDeviceMask(device ? device.getProcAddr( "vkCmdSetDeviceMask") : instance.getProcAddr( "vkCmdSetDeviceMask")); + vkCmdSetDeviceMaskKHR = PFN_vkCmdSetDeviceMaskKHR(device ? device.getProcAddr( "vkCmdSetDeviceMaskKHR") : instance.getProcAddr( "vkCmdSetDeviceMaskKHR")); + vkCmdSetDiscardRectangleEXT = PFN_vkCmdSetDiscardRectangleEXT(device ? device.getProcAddr( "vkCmdSetDiscardRectangleEXT") : instance.getProcAddr( "vkCmdSetDiscardRectangleEXT")); + vkCmdSetEvent = PFN_vkCmdSetEvent(device ? device.getProcAddr( "vkCmdSetEvent") : instance.getProcAddr( "vkCmdSetEvent")); + vkCmdSetExclusiveScissorNV = PFN_vkCmdSetExclusiveScissorNV(device ? device.getProcAddr( "vkCmdSetExclusiveScissorNV") : instance.getProcAddr( "vkCmdSetExclusiveScissorNV")); + vkCmdSetLineWidth = PFN_vkCmdSetLineWidth(device ? device.getProcAddr( "vkCmdSetLineWidth") : instance.getProcAddr( "vkCmdSetLineWidth")); + vkCmdSetSampleLocationsEXT = PFN_vkCmdSetSampleLocationsEXT(device ? device.getProcAddr( "vkCmdSetSampleLocationsEXT") : instance.getProcAddr( "vkCmdSetSampleLocationsEXT")); + vkCmdSetScissor = PFN_vkCmdSetScissor(device ? device.getProcAddr( "vkCmdSetScissor") : instance.getProcAddr( "vkCmdSetScissor")); + vkCmdSetStencilCompareMask = PFN_vkCmdSetStencilCompareMask(device ? device.getProcAddr( "vkCmdSetStencilCompareMask") : instance.getProcAddr( "vkCmdSetStencilCompareMask")); + vkCmdSetStencilReference = PFN_vkCmdSetStencilReference(device ? device.getProcAddr( "vkCmdSetStencilReference") : instance.getProcAddr( "vkCmdSetStencilReference")); + vkCmdSetStencilWriteMask = PFN_vkCmdSetStencilWriteMask(device ? device.getProcAddr( "vkCmdSetStencilWriteMask") : instance.getProcAddr( "vkCmdSetStencilWriteMask")); + vkCmdSetViewport = PFN_vkCmdSetViewport(device ? device.getProcAddr( "vkCmdSetViewport") : instance.getProcAddr( "vkCmdSetViewport")); + vkCmdSetViewportShadingRatePaletteNV = PFN_vkCmdSetViewportShadingRatePaletteNV(device ? 
device.getProcAddr( "vkCmdSetViewportShadingRatePaletteNV") : instance.getProcAddr( "vkCmdSetViewportShadingRatePaletteNV")); + vkCmdSetViewportWScalingNV = PFN_vkCmdSetViewportWScalingNV(device ? device.getProcAddr( "vkCmdSetViewportWScalingNV") : instance.getProcAddr( "vkCmdSetViewportWScalingNV")); + vkCmdTraceRaysNV = PFN_vkCmdTraceRaysNV(device ? device.getProcAddr( "vkCmdTraceRaysNV") : instance.getProcAddr( "vkCmdTraceRaysNV")); + vkCmdUpdateBuffer = PFN_vkCmdUpdateBuffer(device ? device.getProcAddr( "vkCmdUpdateBuffer") : instance.getProcAddr( "vkCmdUpdateBuffer")); + vkCmdWaitEvents = PFN_vkCmdWaitEvents(device ? device.getProcAddr( "vkCmdWaitEvents") : instance.getProcAddr( "vkCmdWaitEvents")); + vkCmdWriteAccelerationStructuresPropertiesNV = PFN_vkCmdWriteAccelerationStructuresPropertiesNV(device ? device.getProcAddr( "vkCmdWriteAccelerationStructuresPropertiesNV") : instance.getProcAddr( "vkCmdWriteAccelerationStructuresPropertiesNV")); + vkCmdWriteBufferMarkerAMD = PFN_vkCmdWriteBufferMarkerAMD(device ? device.getProcAddr( "vkCmdWriteBufferMarkerAMD") : instance.getProcAddr( "vkCmdWriteBufferMarkerAMD")); + vkCmdWriteTimestamp = PFN_vkCmdWriteTimestamp(device ? device.getProcAddr( "vkCmdWriteTimestamp") : instance.getProcAddr( "vkCmdWriteTimestamp")); + vkCompileDeferredNV = PFN_vkCompileDeferredNV(device ? device.getProcAddr( "vkCompileDeferredNV") : instance.getProcAddr( "vkCompileDeferredNV")); + vkCreateAccelerationStructureNV = PFN_vkCreateAccelerationStructureNV(device ? device.getProcAddr( "vkCreateAccelerationStructureNV") : instance.getProcAddr( "vkCreateAccelerationStructureNV")); +#ifdef VK_USE_PLATFORM_ANDROID_KHR + vkCreateAndroidSurfaceKHR = PFN_vkCreateAndroidSurfaceKHR(instance.getProcAddr( "vkCreateAndroidSurfaceKHR")); +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + vkCreateBuffer = PFN_vkCreateBuffer(device ? device.getProcAddr( "vkCreateBuffer") : instance.getProcAddr( "vkCreateBuffer")); + vkCreateBufferView = PFN_vkCreateBufferView(device ? device.getProcAddr( "vkCreateBufferView") : instance.getProcAddr( "vkCreateBufferView")); + vkCreateCommandPool = PFN_vkCreateCommandPool(device ? device.getProcAddr( "vkCreateCommandPool") : instance.getProcAddr( "vkCreateCommandPool")); + vkCreateComputePipelines = PFN_vkCreateComputePipelines(device ? device.getProcAddr( "vkCreateComputePipelines") : instance.getProcAddr( "vkCreateComputePipelines")); + vkCreateDebugReportCallbackEXT = PFN_vkCreateDebugReportCallbackEXT(instance.getProcAddr( "vkCreateDebugReportCallbackEXT")); + vkCreateDebugUtilsMessengerEXT = PFN_vkCreateDebugUtilsMessengerEXT(instance.getProcAddr( "vkCreateDebugUtilsMessengerEXT")); + vkCreateDescriptorPool = PFN_vkCreateDescriptorPool(device ? device.getProcAddr( "vkCreateDescriptorPool") : instance.getProcAddr( "vkCreateDescriptorPool")); + vkCreateDescriptorSetLayout = PFN_vkCreateDescriptorSetLayout(device ? device.getProcAddr( "vkCreateDescriptorSetLayout") : instance.getProcAddr( "vkCreateDescriptorSetLayout")); + vkCreateDescriptorUpdateTemplate = PFN_vkCreateDescriptorUpdateTemplate(device ? device.getProcAddr( "vkCreateDescriptorUpdateTemplate") : instance.getProcAddr( "vkCreateDescriptorUpdateTemplate")); + vkCreateDescriptorUpdateTemplateKHR = PFN_vkCreateDescriptorUpdateTemplateKHR(device ? 
device.getProcAddr( "vkCreateDescriptorUpdateTemplateKHR") : instance.getProcAddr( "vkCreateDescriptorUpdateTemplateKHR")); + vkCreateDevice = PFN_vkCreateDevice(instance.getProcAddr( "vkCreateDevice")); + vkCreateDisplayModeKHR = PFN_vkCreateDisplayModeKHR(instance.getProcAddr( "vkCreateDisplayModeKHR")); + vkCreateDisplayPlaneSurfaceKHR = PFN_vkCreateDisplayPlaneSurfaceKHR(instance.getProcAddr( "vkCreateDisplayPlaneSurfaceKHR")); + vkCreateEvent = PFN_vkCreateEvent(device ? device.getProcAddr( "vkCreateEvent") : instance.getProcAddr( "vkCreateEvent")); + vkCreateFence = PFN_vkCreateFence(device ? device.getProcAddr( "vkCreateFence") : instance.getProcAddr( "vkCreateFence")); + vkCreateFramebuffer = PFN_vkCreateFramebuffer(device ? device.getProcAddr( "vkCreateFramebuffer") : instance.getProcAddr( "vkCreateFramebuffer")); + vkCreateGraphicsPipelines = PFN_vkCreateGraphicsPipelines(device ? device.getProcAddr( "vkCreateGraphicsPipelines") : instance.getProcAddr( "vkCreateGraphicsPipelines")); +#ifdef VK_USE_PLATFORM_IOS_MVK + vkCreateIOSSurfaceMVK = PFN_vkCreateIOSSurfaceMVK(instance.getProcAddr( "vkCreateIOSSurfaceMVK")); +#endif /*VK_USE_PLATFORM_IOS_MVK*/ + vkCreateImage = PFN_vkCreateImage(device ? device.getProcAddr( "vkCreateImage") : instance.getProcAddr( "vkCreateImage")); +#ifdef VK_USE_PLATFORM_FUCHSIA_FUCHSIA + vkCreateImagePipeSurfaceFUCHSIA = PFN_vkCreateImagePipeSurfaceFUCHSIA(instance.getProcAddr( "vkCreateImagePipeSurfaceFUCHSIA")); +#endif /*VK_USE_PLATFORM_FUCHSIA_FUCHSIA*/ + vkCreateImageView = PFN_vkCreateImageView(device ? device.getProcAddr( "vkCreateImageView") : instance.getProcAddr( "vkCreateImageView")); + vkCreateIndirectCommandsLayoutNVX = PFN_vkCreateIndirectCommandsLayoutNVX(device ? device.getProcAddr( "vkCreateIndirectCommandsLayoutNVX") : instance.getProcAddr( "vkCreateIndirectCommandsLayoutNVX")); + vkCreateInstance = PFN_vkCreateInstance(instance.getProcAddr( "vkCreateInstance")); +#ifdef VK_USE_PLATFORM_MACOS_MVK + vkCreateMacOSSurfaceMVK = PFN_vkCreateMacOSSurfaceMVK(instance.getProcAddr( "vkCreateMacOSSurfaceMVK")); +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ + vkCreateObjectTableNVX = PFN_vkCreateObjectTableNVX(device ? device.getProcAddr( "vkCreateObjectTableNVX") : instance.getProcAddr( "vkCreateObjectTableNVX")); + vkCreatePipelineCache = PFN_vkCreatePipelineCache(device ? device.getProcAddr( "vkCreatePipelineCache") : instance.getProcAddr( "vkCreatePipelineCache")); + vkCreatePipelineLayout = PFN_vkCreatePipelineLayout(device ? device.getProcAddr( "vkCreatePipelineLayout") : instance.getProcAddr( "vkCreatePipelineLayout")); + vkCreateQueryPool = PFN_vkCreateQueryPool(device ? device.getProcAddr( "vkCreateQueryPool") : instance.getProcAddr( "vkCreateQueryPool")); + vkCreateRayTracingPipelinesNV = PFN_vkCreateRayTracingPipelinesNV(device ? device.getProcAddr( "vkCreateRayTracingPipelinesNV") : instance.getProcAddr( "vkCreateRayTracingPipelinesNV")); + vkCreateRenderPass = PFN_vkCreateRenderPass(device ? device.getProcAddr( "vkCreateRenderPass") : instance.getProcAddr( "vkCreateRenderPass")); + vkCreateRenderPass2KHR = PFN_vkCreateRenderPass2KHR(device ? device.getProcAddr( "vkCreateRenderPass2KHR") : instance.getProcAddr( "vkCreateRenderPass2KHR")); + vkCreateSampler = PFN_vkCreateSampler(device ? device.getProcAddr( "vkCreateSampler") : instance.getProcAddr( "vkCreateSampler")); + vkCreateSamplerYcbcrConversion = PFN_vkCreateSamplerYcbcrConversion(device ? 
device.getProcAddr( "vkCreateSamplerYcbcrConversion") : instance.getProcAddr( "vkCreateSamplerYcbcrConversion")); + vkCreateSamplerYcbcrConversionKHR = PFN_vkCreateSamplerYcbcrConversionKHR(device ? device.getProcAddr( "vkCreateSamplerYcbcrConversionKHR") : instance.getProcAddr( "vkCreateSamplerYcbcrConversionKHR")); + vkCreateSemaphore = PFN_vkCreateSemaphore(device ? device.getProcAddr( "vkCreateSemaphore") : instance.getProcAddr( "vkCreateSemaphore")); + vkCreateShaderModule = PFN_vkCreateShaderModule(device ? device.getProcAddr( "vkCreateShaderModule") : instance.getProcAddr( "vkCreateShaderModule")); + vkCreateSharedSwapchainsKHR = PFN_vkCreateSharedSwapchainsKHR(device ? device.getProcAddr( "vkCreateSharedSwapchainsKHR") : instance.getProcAddr( "vkCreateSharedSwapchainsKHR")); + vkCreateSwapchainKHR = PFN_vkCreateSwapchainKHR(device ? device.getProcAddr( "vkCreateSwapchainKHR") : instance.getProcAddr( "vkCreateSwapchainKHR")); + vkCreateValidationCacheEXT = PFN_vkCreateValidationCacheEXT(device ? device.getProcAddr( "vkCreateValidationCacheEXT") : instance.getProcAddr( "vkCreateValidationCacheEXT")); +#ifdef VK_USE_PLATFORM_VI_NN + vkCreateViSurfaceNN = PFN_vkCreateViSurfaceNN(instance.getProcAddr( "vkCreateViSurfaceNN")); +#endif /*VK_USE_PLATFORM_VI_NN*/ +#ifdef VK_USE_PLATFORM_WAYLAND_KHR + vkCreateWaylandSurfaceKHR = PFN_vkCreateWaylandSurfaceKHR(instance.getProcAddr( "vkCreateWaylandSurfaceKHR")); +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ +#ifdef VK_USE_PLATFORM_WIN32_KHR + vkCreateWin32SurfaceKHR = PFN_vkCreateWin32SurfaceKHR(instance.getProcAddr( "vkCreateWin32SurfaceKHR")); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +#ifdef VK_USE_PLATFORM_XCB_KHR + vkCreateXcbSurfaceKHR = PFN_vkCreateXcbSurfaceKHR(instance.getProcAddr( "vkCreateXcbSurfaceKHR")); +#endif /*VK_USE_PLATFORM_XCB_KHR*/ +#ifdef VK_USE_PLATFORM_XLIB_KHR + vkCreateXlibSurfaceKHR = PFN_vkCreateXlibSurfaceKHR(instance.getProcAddr( "vkCreateXlibSurfaceKHR")); +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ + vkDebugMarkerSetObjectNameEXT = PFN_vkDebugMarkerSetObjectNameEXT(device ? device.getProcAddr( "vkDebugMarkerSetObjectNameEXT") : instance.getProcAddr( "vkDebugMarkerSetObjectNameEXT")); + vkDebugMarkerSetObjectTagEXT = PFN_vkDebugMarkerSetObjectTagEXT(device ? device.getProcAddr( "vkDebugMarkerSetObjectTagEXT") : instance.getProcAddr( "vkDebugMarkerSetObjectTagEXT")); + vkDebugReportMessageEXT = PFN_vkDebugReportMessageEXT(instance.getProcAddr( "vkDebugReportMessageEXT")); + vkDestroyAccelerationStructureNV = PFN_vkDestroyAccelerationStructureNV(device ? device.getProcAddr( "vkDestroyAccelerationStructureNV") : instance.getProcAddr( "vkDestroyAccelerationStructureNV")); + vkDestroyBuffer = PFN_vkDestroyBuffer(device ? device.getProcAddr( "vkDestroyBuffer") : instance.getProcAddr( "vkDestroyBuffer")); + vkDestroyBufferView = PFN_vkDestroyBufferView(device ? device.getProcAddr( "vkDestroyBufferView") : instance.getProcAddr( "vkDestroyBufferView")); + vkDestroyCommandPool = PFN_vkDestroyCommandPool(device ? device.getProcAddr( "vkDestroyCommandPool") : instance.getProcAddr( "vkDestroyCommandPool")); + vkDestroyDebugReportCallbackEXT = PFN_vkDestroyDebugReportCallbackEXT(instance.getProcAddr( "vkDestroyDebugReportCallbackEXT")); + vkDestroyDebugUtilsMessengerEXT = PFN_vkDestroyDebugUtilsMessengerEXT(instance.getProcAddr( "vkDestroyDebugUtilsMessengerEXT")); + vkDestroyDescriptorPool = PFN_vkDestroyDescriptorPool(device ? 
device.getProcAddr( "vkDestroyDescriptorPool") : instance.getProcAddr( "vkDestroyDescriptorPool")); + vkDestroyDescriptorSetLayout = PFN_vkDestroyDescriptorSetLayout(device ? device.getProcAddr( "vkDestroyDescriptorSetLayout") : instance.getProcAddr( "vkDestroyDescriptorSetLayout")); + vkDestroyDescriptorUpdateTemplate = PFN_vkDestroyDescriptorUpdateTemplate(device ? device.getProcAddr( "vkDestroyDescriptorUpdateTemplate") : instance.getProcAddr( "vkDestroyDescriptorUpdateTemplate")); + vkDestroyDescriptorUpdateTemplateKHR = PFN_vkDestroyDescriptorUpdateTemplateKHR(device ? device.getProcAddr( "vkDestroyDescriptorUpdateTemplateKHR") : instance.getProcAddr( "vkDestroyDescriptorUpdateTemplateKHR")); + vkDestroyDevice = PFN_vkDestroyDevice(device ? device.getProcAddr( "vkDestroyDevice") : instance.getProcAddr( "vkDestroyDevice")); + vkDestroyEvent = PFN_vkDestroyEvent(device ? device.getProcAddr( "vkDestroyEvent") : instance.getProcAddr( "vkDestroyEvent")); + vkDestroyFence = PFN_vkDestroyFence(device ? device.getProcAddr( "vkDestroyFence") : instance.getProcAddr( "vkDestroyFence")); + vkDestroyFramebuffer = PFN_vkDestroyFramebuffer(device ? device.getProcAddr( "vkDestroyFramebuffer") : instance.getProcAddr( "vkDestroyFramebuffer")); + vkDestroyImage = PFN_vkDestroyImage(device ? device.getProcAddr( "vkDestroyImage") : instance.getProcAddr( "vkDestroyImage")); + vkDestroyImageView = PFN_vkDestroyImageView(device ? device.getProcAddr( "vkDestroyImageView") : instance.getProcAddr( "vkDestroyImageView")); + vkDestroyIndirectCommandsLayoutNVX = PFN_vkDestroyIndirectCommandsLayoutNVX(device ? device.getProcAddr( "vkDestroyIndirectCommandsLayoutNVX") : instance.getProcAddr( "vkDestroyIndirectCommandsLayoutNVX")); + vkDestroyInstance = PFN_vkDestroyInstance(instance.getProcAddr( "vkDestroyInstance")); + vkDestroyObjectTableNVX = PFN_vkDestroyObjectTableNVX(device ? device.getProcAddr( "vkDestroyObjectTableNVX") : instance.getProcAddr( "vkDestroyObjectTableNVX")); + vkDestroyPipeline = PFN_vkDestroyPipeline(device ? device.getProcAddr( "vkDestroyPipeline") : instance.getProcAddr( "vkDestroyPipeline")); + vkDestroyPipelineCache = PFN_vkDestroyPipelineCache(device ? device.getProcAddr( "vkDestroyPipelineCache") : instance.getProcAddr( "vkDestroyPipelineCache")); + vkDestroyPipelineLayout = PFN_vkDestroyPipelineLayout(device ? device.getProcAddr( "vkDestroyPipelineLayout") : instance.getProcAddr( "vkDestroyPipelineLayout")); + vkDestroyQueryPool = PFN_vkDestroyQueryPool(device ? device.getProcAddr( "vkDestroyQueryPool") : instance.getProcAddr( "vkDestroyQueryPool")); + vkDestroyRenderPass = PFN_vkDestroyRenderPass(device ? device.getProcAddr( "vkDestroyRenderPass") : instance.getProcAddr( "vkDestroyRenderPass")); + vkDestroySampler = PFN_vkDestroySampler(device ? device.getProcAddr( "vkDestroySampler") : instance.getProcAddr( "vkDestroySampler")); + vkDestroySamplerYcbcrConversion = PFN_vkDestroySamplerYcbcrConversion(device ? device.getProcAddr( "vkDestroySamplerYcbcrConversion") : instance.getProcAddr( "vkDestroySamplerYcbcrConversion")); + vkDestroySamplerYcbcrConversionKHR = PFN_vkDestroySamplerYcbcrConversionKHR(device ? device.getProcAddr( "vkDestroySamplerYcbcrConversionKHR") : instance.getProcAddr( "vkDestroySamplerYcbcrConversionKHR")); + vkDestroySemaphore = PFN_vkDestroySemaphore(device ? device.getProcAddr( "vkDestroySemaphore") : instance.getProcAddr( "vkDestroySemaphore")); + vkDestroyShaderModule = PFN_vkDestroyShaderModule(device ? 
device.getProcAddr( "vkDestroyShaderModule") : instance.getProcAddr( "vkDestroyShaderModule")); + vkDestroySurfaceKHR = PFN_vkDestroySurfaceKHR(instance.getProcAddr( "vkDestroySurfaceKHR")); + vkDestroySwapchainKHR = PFN_vkDestroySwapchainKHR(device ? device.getProcAddr( "vkDestroySwapchainKHR") : instance.getProcAddr( "vkDestroySwapchainKHR")); + vkDestroyValidationCacheEXT = PFN_vkDestroyValidationCacheEXT(device ? device.getProcAddr( "vkDestroyValidationCacheEXT") : instance.getProcAddr( "vkDestroyValidationCacheEXT")); + vkDeviceWaitIdle = PFN_vkDeviceWaitIdle(device ? device.getProcAddr( "vkDeviceWaitIdle") : instance.getProcAddr( "vkDeviceWaitIdle")); + vkDisplayPowerControlEXT = PFN_vkDisplayPowerControlEXT(device ? device.getProcAddr( "vkDisplayPowerControlEXT") : instance.getProcAddr( "vkDisplayPowerControlEXT")); + vkEndCommandBuffer = PFN_vkEndCommandBuffer(device ? device.getProcAddr( "vkEndCommandBuffer") : instance.getProcAddr( "vkEndCommandBuffer")); + vkEnumerateDeviceExtensionProperties = PFN_vkEnumerateDeviceExtensionProperties(instance.getProcAddr( "vkEnumerateDeviceExtensionProperties")); + vkEnumerateDeviceLayerProperties = PFN_vkEnumerateDeviceLayerProperties(instance.getProcAddr( "vkEnumerateDeviceLayerProperties")); + vkEnumerateInstanceExtensionProperties = PFN_vkEnumerateInstanceExtensionProperties(instance.getProcAddr( "vkEnumerateInstanceExtensionProperties")); + vkEnumerateInstanceLayerProperties = PFN_vkEnumerateInstanceLayerProperties(instance.getProcAddr( "vkEnumerateInstanceLayerProperties")); + vkEnumerateInstanceVersion = PFN_vkEnumerateInstanceVersion(instance.getProcAddr( "vkEnumerateInstanceVersion")); + vkEnumeratePhysicalDeviceGroups = PFN_vkEnumeratePhysicalDeviceGroups(instance.getProcAddr( "vkEnumeratePhysicalDeviceGroups")); + vkEnumeratePhysicalDeviceGroupsKHR = PFN_vkEnumeratePhysicalDeviceGroupsKHR(instance.getProcAddr( "vkEnumeratePhysicalDeviceGroupsKHR")); + vkEnumeratePhysicalDevices = PFN_vkEnumeratePhysicalDevices(instance.getProcAddr( "vkEnumeratePhysicalDevices")); + vkFlushMappedMemoryRanges = PFN_vkFlushMappedMemoryRanges(device ? device.getProcAddr( "vkFlushMappedMemoryRanges") : instance.getProcAddr( "vkFlushMappedMemoryRanges")); + vkFreeCommandBuffers = PFN_vkFreeCommandBuffers(device ? device.getProcAddr( "vkFreeCommandBuffers") : instance.getProcAddr( "vkFreeCommandBuffers")); + vkFreeDescriptorSets = PFN_vkFreeDescriptorSets(device ? device.getProcAddr( "vkFreeDescriptorSets") : instance.getProcAddr( "vkFreeDescriptorSets")); + vkFreeMemory = PFN_vkFreeMemory(device ? device.getProcAddr( "vkFreeMemory") : instance.getProcAddr( "vkFreeMemory")); + vkGetAccelerationStructureHandleNV = PFN_vkGetAccelerationStructureHandleNV(device ? device.getProcAddr( "vkGetAccelerationStructureHandleNV") : instance.getProcAddr( "vkGetAccelerationStructureHandleNV")); + vkGetAccelerationStructureMemoryRequirementsNV = PFN_vkGetAccelerationStructureMemoryRequirementsNV(device ? device.getProcAddr( "vkGetAccelerationStructureMemoryRequirementsNV") : instance.getProcAddr( "vkGetAccelerationStructureMemoryRequirementsNV")); +#ifdef VK_USE_PLATFORM_ANDROID_ANDROID + vkGetAndroidHardwareBufferPropertiesANDROID = PFN_vkGetAndroidHardwareBufferPropertiesANDROID(device ? device.getProcAddr( "vkGetAndroidHardwareBufferPropertiesANDROID") : instance.getProcAddr( "vkGetAndroidHardwareBufferPropertiesANDROID")); +#endif /*VK_USE_PLATFORM_ANDROID_ANDROID*/ + vkGetBufferMemoryRequirements = PFN_vkGetBufferMemoryRequirements(device ? 
device.getProcAddr( "vkGetBufferMemoryRequirements") : instance.getProcAddr( "vkGetBufferMemoryRequirements")); + vkGetBufferMemoryRequirements2 = PFN_vkGetBufferMemoryRequirements2(device ? device.getProcAddr( "vkGetBufferMemoryRequirements2") : instance.getProcAddr( "vkGetBufferMemoryRequirements2")); + vkGetBufferMemoryRequirements2KHR = PFN_vkGetBufferMemoryRequirements2KHR(device ? device.getProcAddr( "vkGetBufferMemoryRequirements2KHR") : instance.getProcAddr( "vkGetBufferMemoryRequirements2KHR")); + vkGetCalibratedTimestampsEXT = PFN_vkGetCalibratedTimestampsEXT(device ? device.getProcAddr( "vkGetCalibratedTimestampsEXT") : instance.getProcAddr( "vkGetCalibratedTimestampsEXT")); + vkGetDescriptorSetLayoutSupport = PFN_vkGetDescriptorSetLayoutSupport(device ? device.getProcAddr( "vkGetDescriptorSetLayoutSupport") : instance.getProcAddr( "vkGetDescriptorSetLayoutSupport")); + vkGetDescriptorSetLayoutSupportKHR = PFN_vkGetDescriptorSetLayoutSupportKHR(device ? device.getProcAddr( "vkGetDescriptorSetLayoutSupportKHR") : instance.getProcAddr( "vkGetDescriptorSetLayoutSupportKHR")); + vkGetDeviceGroupPeerMemoryFeatures = PFN_vkGetDeviceGroupPeerMemoryFeatures(device ? device.getProcAddr( "vkGetDeviceGroupPeerMemoryFeatures") : instance.getProcAddr( "vkGetDeviceGroupPeerMemoryFeatures")); + vkGetDeviceGroupPeerMemoryFeaturesKHR = PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR(device ? device.getProcAddr( "vkGetDeviceGroupPeerMemoryFeaturesKHR") : instance.getProcAddr( "vkGetDeviceGroupPeerMemoryFeaturesKHR")); + vkGetDeviceGroupPresentCapabilitiesKHR = PFN_vkGetDeviceGroupPresentCapabilitiesKHR(device ? device.getProcAddr( "vkGetDeviceGroupPresentCapabilitiesKHR") : instance.getProcAddr( "vkGetDeviceGroupPresentCapabilitiesKHR")); + vkGetDeviceGroupSurfacePresentModesKHR = PFN_vkGetDeviceGroupSurfacePresentModesKHR(device ? device.getProcAddr( "vkGetDeviceGroupSurfacePresentModesKHR") : instance.getProcAddr( "vkGetDeviceGroupSurfacePresentModesKHR")); + vkGetDeviceMemoryCommitment = PFN_vkGetDeviceMemoryCommitment(device ? device.getProcAddr( "vkGetDeviceMemoryCommitment") : instance.getProcAddr( "vkGetDeviceMemoryCommitment")); + vkGetDeviceProcAddr = PFN_vkGetDeviceProcAddr(device ? device.getProcAddr( "vkGetDeviceProcAddr") : instance.getProcAddr( "vkGetDeviceProcAddr")); + vkGetDeviceQueue = PFN_vkGetDeviceQueue(device ? device.getProcAddr( "vkGetDeviceQueue") : instance.getProcAddr( "vkGetDeviceQueue")); + vkGetDeviceQueue2 = PFN_vkGetDeviceQueue2(device ? device.getProcAddr( "vkGetDeviceQueue2") : instance.getProcAddr( "vkGetDeviceQueue2")); + vkGetDisplayModeProperties2KHR = PFN_vkGetDisplayModeProperties2KHR(instance.getProcAddr( "vkGetDisplayModeProperties2KHR")); + vkGetDisplayModePropertiesKHR = PFN_vkGetDisplayModePropertiesKHR(instance.getProcAddr( "vkGetDisplayModePropertiesKHR")); + vkGetDisplayPlaneCapabilities2KHR = PFN_vkGetDisplayPlaneCapabilities2KHR(instance.getProcAddr( "vkGetDisplayPlaneCapabilities2KHR")); + vkGetDisplayPlaneCapabilitiesKHR = PFN_vkGetDisplayPlaneCapabilitiesKHR(instance.getProcAddr( "vkGetDisplayPlaneCapabilitiesKHR")); + vkGetDisplayPlaneSupportedDisplaysKHR = PFN_vkGetDisplayPlaneSupportedDisplaysKHR(instance.getProcAddr( "vkGetDisplayPlaneSupportedDisplaysKHR")); + vkGetEventStatus = PFN_vkGetEventStatus(device ? device.getProcAddr( "vkGetEventStatus") : instance.getProcAddr( "vkGetEventStatus")); + vkGetFenceFdKHR = PFN_vkGetFenceFdKHR(device ? 
device.getProcAddr( "vkGetFenceFdKHR") : instance.getProcAddr( "vkGetFenceFdKHR")); + vkGetFenceStatus = PFN_vkGetFenceStatus(device ? device.getProcAddr( "vkGetFenceStatus") : instance.getProcAddr( "vkGetFenceStatus")); +#ifdef VK_USE_PLATFORM_WIN32_KHR + vkGetFenceWin32HandleKHR = PFN_vkGetFenceWin32HandleKHR(device ? device.getProcAddr( "vkGetFenceWin32HandleKHR") : instance.getProcAddr( "vkGetFenceWin32HandleKHR")); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + vkGetImageDrmFormatModifierPropertiesEXT = PFN_vkGetImageDrmFormatModifierPropertiesEXT(device ? device.getProcAddr( "vkGetImageDrmFormatModifierPropertiesEXT") : instance.getProcAddr( "vkGetImageDrmFormatModifierPropertiesEXT")); + vkGetImageMemoryRequirements = PFN_vkGetImageMemoryRequirements(device ? device.getProcAddr( "vkGetImageMemoryRequirements") : instance.getProcAddr( "vkGetImageMemoryRequirements")); + vkGetImageMemoryRequirements2 = PFN_vkGetImageMemoryRequirements2(device ? device.getProcAddr( "vkGetImageMemoryRequirements2") : instance.getProcAddr( "vkGetImageMemoryRequirements2")); + vkGetImageMemoryRequirements2KHR = PFN_vkGetImageMemoryRequirements2KHR(device ? device.getProcAddr( "vkGetImageMemoryRequirements2KHR") : instance.getProcAddr( "vkGetImageMemoryRequirements2KHR")); + vkGetImageSparseMemoryRequirements = PFN_vkGetImageSparseMemoryRequirements(device ? device.getProcAddr( "vkGetImageSparseMemoryRequirements") : instance.getProcAddr( "vkGetImageSparseMemoryRequirements")); + vkGetImageSparseMemoryRequirements2 = PFN_vkGetImageSparseMemoryRequirements2(device ? device.getProcAddr( "vkGetImageSparseMemoryRequirements2") : instance.getProcAddr( "vkGetImageSparseMemoryRequirements2")); + vkGetImageSparseMemoryRequirements2KHR = PFN_vkGetImageSparseMemoryRequirements2KHR(device ? device.getProcAddr( "vkGetImageSparseMemoryRequirements2KHR") : instance.getProcAddr( "vkGetImageSparseMemoryRequirements2KHR")); + vkGetImageSubresourceLayout = PFN_vkGetImageSubresourceLayout(device ? device.getProcAddr( "vkGetImageSubresourceLayout") : instance.getProcAddr( "vkGetImageSubresourceLayout")); + vkGetInstanceProcAddr = PFN_vkGetInstanceProcAddr(instance.getProcAddr( "vkGetInstanceProcAddr")); +#ifdef VK_USE_PLATFORM_ANDROID_ANDROID + vkGetMemoryAndroidHardwareBufferANDROID = PFN_vkGetMemoryAndroidHardwareBufferANDROID(device ? device.getProcAddr( "vkGetMemoryAndroidHardwareBufferANDROID") : instance.getProcAddr( "vkGetMemoryAndroidHardwareBufferANDROID")); +#endif /*VK_USE_PLATFORM_ANDROID_ANDROID*/ + vkGetMemoryFdKHR = PFN_vkGetMemoryFdKHR(device ? device.getProcAddr( "vkGetMemoryFdKHR") : instance.getProcAddr( "vkGetMemoryFdKHR")); + vkGetMemoryFdPropertiesKHR = PFN_vkGetMemoryFdPropertiesKHR(device ? device.getProcAddr( "vkGetMemoryFdPropertiesKHR") : instance.getProcAddr( "vkGetMemoryFdPropertiesKHR")); + vkGetMemoryHostPointerPropertiesEXT = PFN_vkGetMemoryHostPointerPropertiesEXT(device ? device.getProcAddr( "vkGetMemoryHostPointerPropertiesEXT") : instance.getProcAddr( "vkGetMemoryHostPointerPropertiesEXT")); +#ifdef VK_USE_PLATFORM_WIN32_KHR + vkGetMemoryWin32HandleKHR = PFN_vkGetMemoryWin32HandleKHR(device ? device.getProcAddr( "vkGetMemoryWin32HandleKHR") : instance.getProcAddr( "vkGetMemoryWin32HandleKHR")); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +#ifdef VK_USE_PLATFORM_WIN32_NV + vkGetMemoryWin32HandleNV = PFN_vkGetMemoryWin32HandleNV(device ? 
device.getProcAddr( "vkGetMemoryWin32HandleNV") : instance.getProcAddr( "vkGetMemoryWin32HandleNV")); +#endif /*VK_USE_PLATFORM_WIN32_NV*/ +#ifdef VK_USE_PLATFORM_WIN32_KHR + vkGetMemoryWin32HandlePropertiesKHR = PFN_vkGetMemoryWin32HandlePropertiesKHR(device ? device.getProcAddr( "vkGetMemoryWin32HandlePropertiesKHR") : instance.getProcAddr( "vkGetMemoryWin32HandlePropertiesKHR")); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + vkGetPastPresentationTimingGOOGLE = PFN_vkGetPastPresentationTimingGOOGLE(device ? device.getProcAddr( "vkGetPastPresentationTimingGOOGLE") : instance.getProcAddr( "vkGetPastPresentationTimingGOOGLE")); + vkGetPhysicalDeviceCalibrateableTimeDomainsEXT = PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT(instance.getProcAddr( "vkGetPhysicalDeviceCalibrateableTimeDomainsEXT")); + vkGetPhysicalDeviceDisplayPlaneProperties2KHR = PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR(instance.getProcAddr( "vkGetPhysicalDeviceDisplayPlaneProperties2KHR")); + vkGetPhysicalDeviceDisplayPlanePropertiesKHR = PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR(instance.getProcAddr( "vkGetPhysicalDeviceDisplayPlanePropertiesKHR")); + vkGetPhysicalDeviceDisplayProperties2KHR = PFN_vkGetPhysicalDeviceDisplayProperties2KHR(instance.getProcAddr( "vkGetPhysicalDeviceDisplayProperties2KHR")); + vkGetPhysicalDeviceDisplayPropertiesKHR = PFN_vkGetPhysicalDeviceDisplayPropertiesKHR(instance.getProcAddr( "vkGetPhysicalDeviceDisplayPropertiesKHR")); + vkGetPhysicalDeviceExternalBufferProperties = PFN_vkGetPhysicalDeviceExternalBufferProperties(instance.getProcAddr( "vkGetPhysicalDeviceExternalBufferProperties")); + vkGetPhysicalDeviceExternalBufferPropertiesKHR = PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR(instance.getProcAddr( "vkGetPhysicalDeviceExternalBufferPropertiesKHR")); + vkGetPhysicalDeviceExternalFenceProperties = PFN_vkGetPhysicalDeviceExternalFenceProperties(instance.getProcAddr( "vkGetPhysicalDeviceExternalFenceProperties")); + vkGetPhysicalDeviceExternalFencePropertiesKHR = PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR(instance.getProcAddr( "vkGetPhysicalDeviceExternalFencePropertiesKHR")); + vkGetPhysicalDeviceExternalImageFormatPropertiesNV = PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV(instance.getProcAddr( "vkGetPhysicalDeviceExternalImageFormatPropertiesNV")); + vkGetPhysicalDeviceExternalSemaphoreProperties = PFN_vkGetPhysicalDeviceExternalSemaphoreProperties(instance.getProcAddr( "vkGetPhysicalDeviceExternalSemaphoreProperties")); + vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR(instance.getProcAddr( "vkGetPhysicalDeviceExternalSemaphorePropertiesKHR")); + vkGetPhysicalDeviceFeatures = PFN_vkGetPhysicalDeviceFeatures(instance.getProcAddr( "vkGetPhysicalDeviceFeatures")); + vkGetPhysicalDeviceFeatures2 = PFN_vkGetPhysicalDeviceFeatures2(instance.getProcAddr( "vkGetPhysicalDeviceFeatures2")); + vkGetPhysicalDeviceFeatures2KHR = PFN_vkGetPhysicalDeviceFeatures2KHR(instance.getProcAddr( "vkGetPhysicalDeviceFeatures2KHR")); + vkGetPhysicalDeviceFormatProperties = PFN_vkGetPhysicalDeviceFormatProperties(instance.getProcAddr( "vkGetPhysicalDeviceFormatProperties")); + vkGetPhysicalDeviceFormatProperties2 = PFN_vkGetPhysicalDeviceFormatProperties2(instance.getProcAddr( "vkGetPhysicalDeviceFormatProperties2")); + vkGetPhysicalDeviceFormatProperties2KHR = PFN_vkGetPhysicalDeviceFormatProperties2KHR(instance.getProcAddr( "vkGetPhysicalDeviceFormatProperties2KHR")); + 
vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX = PFN_vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX(instance.getProcAddr( "vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX")); + vkGetPhysicalDeviceImageFormatProperties = PFN_vkGetPhysicalDeviceImageFormatProperties(instance.getProcAddr( "vkGetPhysicalDeviceImageFormatProperties")); + vkGetPhysicalDeviceImageFormatProperties2 = PFN_vkGetPhysicalDeviceImageFormatProperties2(instance.getProcAddr( "vkGetPhysicalDeviceImageFormatProperties2")); + vkGetPhysicalDeviceImageFormatProperties2KHR = PFN_vkGetPhysicalDeviceImageFormatProperties2KHR(instance.getProcAddr( "vkGetPhysicalDeviceImageFormatProperties2KHR")); + vkGetPhysicalDeviceMemoryProperties = PFN_vkGetPhysicalDeviceMemoryProperties(instance.getProcAddr( "vkGetPhysicalDeviceMemoryProperties")); + vkGetPhysicalDeviceMemoryProperties2 = PFN_vkGetPhysicalDeviceMemoryProperties2(instance.getProcAddr( "vkGetPhysicalDeviceMemoryProperties2")); + vkGetPhysicalDeviceMemoryProperties2KHR = PFN_vkGetPhysicalDeviceMemoryProperties2KHR(instance.getProcAddr( "vkGetPhysicalDeviceMemoryProperties2KHR")); + vkGetPhysicalDeviceMultisamplePropertiesEXT = PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT(instance.getProcAddr( "vkGetPhysicalDeviceMultisamplePropertiesEXT")); + vkGetPhysicalDevicePresentRectanglesKHR = PFN_vkGetPhysicalDevicePresentRectanglesKHR(instance.getProcAddr( "vkGetPhysicalDevicePresentRectanglesKHR")); + vkGetPhysicalDeviceProperties = PFN_vkGetPhysicalDeviceProperties(instance.getProcAddr( "vkGetPhysicalDeviceProperties")); + vkGetPhysicalDeviceProperties2 = PFN_vkGetPhysicalDeviceProperties2(instance.getProcAddr( "vkGetPhysicalDeviceProperties2")); + vkGetPhysicalDeviceProperties2KHR = PFN_vkGetPhysicalDeviceProperties2KHR(instance.getProcAddr( "vkGetPhysicalDeviceProperties2KHR")); + vkGetPhysicalDeviceQueueFamilyProperties = PFN_vkGetPhysicalDeviceQueueFamilyProperties(instance.getProcAddr( "vkGetPhysicalDeviceQueueFamilyProperties")); + vkGetPhysicalDeviceQueueFamilyProperties2 = PFN_vkGetPhysicalDeviceQueueFamilyProperties2(instance.getProcAddr( "vkGetPhysicalDeviceQueueFamilyProperties2")); + vkGetPhysicalDeviceQueueFamilyProperties2KHR = PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR(instance.getProcAddr( "vkGetPhysicalDeviceQueueFamilyProperties2KHR")); + vkGetPhysicalDeviceSparseImageFormatProperties = PFN_vkGetPhysicalDeviceSparseImageFormatProperties(instance.getProcAddr( "vkGetPhysicalDeviceSparseImageFormatProperties")); + vkGetPhysicalDeviceSparseImageFormatProperties2 = PFN_vkGetPhysicalDeviceSparseImageFormatProperties2(instance.getProcAddr( "vkGetPhysicalDeviceSparseImageFormatProperties2")); + vkGetPhysicalDeviceSparseImageFormatProperties2KHR = PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR(instance.getProcAddr( "vkGetPhysicalDeviceSparseImageFormatProperties2KHR")); + vkGetPhysicalDeviceSurfaceCapabilities2EXT = PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT(instance.getProcAddr( "vkGetPhysicalDeviceSurfaceCapabilities2EXT")); + vkGetPhysicalDeviceSurfaceCapabilities2KHR = PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR(instance.getProcAddr( "vkGetPhysicalDeviceSurfaceCapabilities2KHR")); + vkGetPhysicalDeviceSurfaceCapabilitiesKHR = PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR(instance.getProcAddr( "vkGetPhysicalDeviceSurfaceCapabilitiesKHR")); + vkGetPhysicalDeviceSurfaceFormats2KHR = PFN_vkGetPhysicalDeviceSurfaceFormats2KHR(instance.getProcAddr( "vkGetPhysicalDeviceSurfaceFormats2KHR")); + vkGetPhysicalDeviceSurfaceFormatsKHR = 
PFN_vkGetPhysicalDeviceSurfaceFormatsKHR(instance.getProcAddr( "vkGetPhysicalDeviceSurfaceFormatsKHR")); + vkGetPhysicalDeviceSurfacePresentModesKHR = PFN_vkGetPhysicalDeviceSurfacePresentModesKHR(instance.getProcAddr( "vkGetPhysicalDeviceSurfacePresentModesKHR")); + vkGetPhysicalDeviceSurfaceSupportKHR = PFN_vkGetPhysicalDeviceSurfaceSupportKHR(instance.getProcAddr( "vkGetPhysicalDeviceSurfaceSupportKHR")); +#ifdef VK_USE_PLATFORM_WAYLAND_KHR + vkGetPhysicalDeviceWaylandPresentationSupportKHR = PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR(instance.getProcAddr( "vkGetPhysicalDeviceWaylandPresentationSupportKHR")); +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ +#ifdef VK_USE_PLATFORM_WIN32_KHR + vkGetPhysicalDeviceWin32PresentationSupportKHR = PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR(instance.getProcAddr( "vkGetPhysicalDeviceWin32PresentationSupportKHR")); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +#ifdef VK_USE_PLATFORM_XCB_KHR + vkGetPhysicalDeviceXcbPresentationSupportKHR = PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR(instance.getProcAddr( "vkGetPhysicalDeviceXcbPresentationSupportKHR")); +#endif /*VK_USE_PLATFORM_XCB_KHR*/ +#ifdef VK_USE_PLATFORM_XLIB_KHR + vkGetPhysicalDeviceXlibPresentationSupportKHR = PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR(instance.getProcAddr( "vkGetPhysicalDeviceXlibPresentationSupportKHR")); +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ + vkGetPipelineCacheData = PFN_vkGetPipelineCacheData(device ? device.getProcAddr( "vkGetPipelineCacheData") : instance.getProcAddr( "vkGetPipelineCacheData")); + vkGetQueryPoolResults = PFN_vkGetQueryPoolResults(device ? device.getProcAddr( "vkGetQueryPoolResults") : instance.getProcAddr( "vkGetQueryPoolResults")); + vkGetQueueCheckpointDataNV = PFN_vkGetQueueCheckpointDataNV(device ? device.getProcAddr( "vkGetQueueCheckpointDataNV") : instance.getProcAddr( "vkGetQueueCheckpointDataNV")); +#ifdef VK_USE_PLATFORM_XLIB_XRANDR_NV + vkGetRandROutputDisplayEXT = PFN_vkGetRandROutputDisplayEXT(instance.getProcAddr( "vkGetRandROutputDisplayEXT")); +#endif /*VK_USE_PLATFORM_XLIB_XRANDR_NV*/ + vkGetRayTracingShaderGroupHandlesNV = PFN_vkGetRayTracingShaderGroupHandlesNV(device ? device.getProcAddr( "vkGetRayTracingShaderGroupHandlesNV") : instance.getProcAddr( "vkGetRayTracingShaderGroupHandlesNV")); + vkGetRefreshCycleDurationGOOGLE = PFN_vkGetRefreshCycleDurationGOOGLE(device ? device.getProcAddr( "vkGetRefreshCycleDurationGOOGLE") : instance.getProcAddr( "vkGetRefreshCycleDurationGOOGLE")); + vkGetRenderAreaGranularity = PFN_vkGetRenderAreaGranularity(device ? device.getProcAddr( "vkGetRenderAreaGranularity") : instance.getProcAddr( "vkGetRenderAreaGranularity")); + vkGetSemaphoreFdKHR = PFN_vkGetSemaphoreFdKHR(device ? device.getProcAddr( "vkGetSemaphoreFdKHR") : instance.getProcAddr( "vkGetSemaphoreFdKHR")); +#ifdef VK_USE_PLATFORM_WIN32_KHR + vkGetSemaphoreWin32HandleKHR = PFN_vkGetSemaphoreWin32HandleKHR(device ? device.getProcAddr( "vkGetSemaphoreWin32HandleKHR") : instance.getProcAddr( "vkGetSemaphoreWin32HandleKHR")); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + vkGetShaderInfoAMD = PFN_vkGetShaderInfoAMD(device ? device.getProcAddr( "vkGetShaderInfoAMD") : instance.getProcAddr( "vkGetShaderInfoAMD")); + vkGetSwapchainCounterEXT = PFN_vkGetSwapchainCounterEXT(device ? device.getProcAddr( "vkGetSwapchainCounterEXT") : instance.getProcAddr( "vkGetSwapchainCounterEXT")); + vkGetSwapchainImagesKHR = PFN_vkGetSwapchainImagesKHR(device ? 
device.getProcAddr( "vkGetSwapchainImagesKHR") : instance.getProcAddr( "vkGetSwapchainImagesKHR")); + vkGetSwapchainStatusKHR = PFN_vkGetSwapchainStatusKHR(device ? device.getProcAddr( "vkGetSwapchainStatusKHR") : instance.getProcAddr( "vkGetSwapchainStatusKHR")); + vkGetValidationCacheDataEXT = PFN_vkGetValidationCacheDataEXT(device ? device.getProcAddr( "vkGetValidationCacheDataEXT") : instance.getProcAddr( "vkGetValidationCacheDataEXT")); + vkImportFenceFdKHR = PFN_vkImportFenceFdKHR(device ? device.getProcAddr( "vkImportFenceFdKHR") : instance.getProcAddr( "vkImportFenceFdKHR")); +#ifdef VK_USE_PLATFORM_WIN32_KHR + vkImportFenceWin32HandleKHR = PFN_vkImportFenceWin32HandleKHR(device ? device.getProcAddr( "vkImportFenceWin32HandleKHR") : instance.getProcAddr( "vkImportFenceWin32HandleKHR")); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + vkImportSemaphoreFdKHR = PFN_vkImportSemaphoreFdKHR(device ? device.getProcAddr( "vkImportSemaphoreFdKHR") : instance.getProcAddr( "vkImportSemaphoreFdKHR")); +#ifdef VK_USE_PLATFORM_WIN32_KHR + vkImportSemaphoreWin32HandleKHR = PFN_vkImportSemaphoreWin32HandleKHR(device ? device.getProcAddr( "vkImportSemaphoreWin32HandleKHR") : instance.getProcAddr( "vkImportSemaphoreWin32HandleKHR")); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + vkInvalidateMappedMemoryRanges = PFN_vkInvalidateMappedMemoryRanges(device ? device.getProcAddr( "vkInvalidateMappedMemoryRanges") : instance.getProcAddr( "vkInvalidateMappedMemoryRanges")); + vkMapMemory = PFN_vkMapMemory(device ? device.getProcAddr( "vkMapMemory") : instance.getProcAddr( "vkMapMemory")); + vkMergePipelineCaches = PFN_vkMergePipelineCaches(device ? device.getProcAddr( "vkMergePipelineCaches") : instance.getProcAddr( "vkMergePipelineCaches")); + vkMergeValidationCachesEXT = PFN_vkMergeValidationCachesEXT(device ? device.getProcAddr( "vkMergeValidationCachesEXT") : instance.getProcAddr( "vkMergeValidationCachesEXT")); + vkQueueBeginDebugUtilsLabelEXT = PFN_vkQueueBeginDebugUtilsLabelEXT(device ? device.getProcAddr( "vkQueueBeginDebugUtilsLabelEXT") : instance.getProcAddr( "vkQueueBeginDebugUtilsLabelEXT")); + vkQueueBindSparse = PFN_vkQueueBindSparse(device ? device.getProcAddr( "vkQueueBindSparse") : instance.getProcAddr( "vkQueueBindSparse")); + vkQueueEndDebugUtilsLabelEXT = PFN_vkQueueEndDebugUtilsLabelEXT(device ? device.getProcAddr( "vkQueueEndDebugUtilsLabelEXT") : instance.getProcAddr( "vkQueueEndDebugUtilsLabelEXT")); + vkQueueInsertDebugUtilsLabelEXT = PFN_vkQueueInsertDebugUtilsLabelEXT(device ? device.getProcAddr( "vkQueueInsertDebugUtilsLabelEXT") : instance.getProcAddr( "vkQueueInsertDebugUtilsLabelEXT")); + vkQueuePresentKHR = PFN_vkQueuePresentKHR(device ? device.getProcAddr( "vkQueuePresentKHR") : instance.getProcAddr( "vkQueuePresentKHR")); + vkQueueSubmit = PFN_vkQueueSubmit(device ? device.getProcAddr( "vkQueueSubmit") : instance.getProcAddr( "vkQueueSubmit")); + vkQueueWaitIdle = PFN_vkQueueWaitIdle(device ? device.getProcAddr( "vkQueueWaitIdle") : instance.getProcAddr( "vkQueueWaitIdle")); + vkRegisterDeviceEventEXT = PFN_vkRegisterDeviceEventEXT(device ? device.getProcAddr( "vkRegisterDeviceEventEXT") : instance.getProcAddr( "vkRegisterDeviceEventEXT")); + vkRegisterDisplayEventEXT = PFN_vkRegisterDisplayEventEXT(device ? device.getProcAddr( "vkRegisterDisplayEventEXT") : instance.getProcAddr( "vkRegisterDisplayEventEXT")); + vkRegisterObjectsNVX = PFN_vkRegisterObjectsNVX(device ? 
device.getProcAddr( "vkRegisterObjectsNVX") : instance.getProcAddr( "vkRegisterObjectsNVX")); + vkReleaseDisplayEXT = PFN_vkReleaseDisplayEXT(instance.getProcAddr( "vkReleaseDisplayEXT")); + vkResetCommandBuffer = PFN_vkResetCommandBuffer(device ? device.getProcAddr( "vkResetCommandBuffer") : instance.getProcAddr( "vkResetCommandBuffer")); + vkResetCommandPool = PFN_vkResetCommandPool(device ? device.getProcAddr( "vkResetCommandPool") : instance.getProcAddr( "vkResetCommandPool")); + vkResetDescriptorPool = PFN_vkResetDescriptorPool(device ? device.getProcAddr( "vkResetDescriptorPool") : instance.getProcAddr( "vkResetDescriptorPool")); + vkResetEvent = PFN_vkResetEvent(device ? device.getProcAddr( "vkResetEvent") : instance.getProcAddr( "vkResetEvent")); + vkResetFences = PFN_vkResetFences(device ? device.getProcAddr( "vkResetFences") : instance.getProcAddr( "vkResetFences")); + vkSetDebugUtilsObjectNameEXT = PFN_vkSetDebugUtilsObjectNameEXT(device ? device.getProcAddr( "vkSetDebugUtilsObjectNameEXT") : instance.getProcAddr( "vkSetDebugUtilsObjectNameEXT")); + vkSetDebugUtilsObjectTagEXT = PFN_vkSetDebugUtilsObjectTagEXT(device ? device.getProcAddr( "vkSetDebugUtilsObjectTagEXT") : instance.getProcAddr( "vkSetDebugUtilsObjectTagEXT")); + vkSetEvent = PFN_vkSetEvent(device ? device.getProcAddr( "vkSetEvent") : instance.getProcAddr( "vkSetEvent")); + vkSetHdrMetadataEXT = PFN_vkSetHdrMetadataEXT(device ? device.getProcAddr( "vkSetHdrMetadataEXT") : instance.getProcAddr( "vkSetHdrMetadataEXT")); + vkSubmitDebugUtilsMessageEXT = PFN_vkSubmitDebugUtilsMessageEXT(instance.getProcAddr( "vkSubmitDebugUtilsMessageEXT")); + vkTrimCommandPool = PFN_vkTrimCommandPool(device ? device.getProcAddr( "vkTrimCommandPool") : instance.getProcAddr( "vkTrimCommandPool")); + vkTrimCommandPoolKHR = PFN_vkTrimCommandPoolKHR(device ? device.getProcAddr( "vkTrimCommandPoolKHR") : instance.getProcAddr( "vkTrimCommandPoolKHR")); + vkUnmapMemory = PFN_vkUnmapMemory(device ? device.getProcAddr( "vkUnmapMemory") : instance.getProcAddr( "vkUnmapMemory")); + vkUnregisterObjectsNVX = PFN_vkUnregisterObjectsNVX(device ? device.getProcAddr( "vkUnregisterObjectsNVX") : instance.getProcAddr( "vkUnregisterObjectsNVX")); + vkUpdateDescriptorSetWithTemplate = PFN_vkUpdateDescriptorSetWithTemplate(device ? device.getProcAddr( "vkUpdateDescriptorSetWithTemplate") : instance.getProcAddr( "vkUpdateDescriptorSetWithTemplate")); + vkUpdateDescriptorSetWithTemplateKHR = PFN_vkUpdateDescriptorSetWithTemplateKHR(device ? device.getProcAddr( "vkUpdateDescriptorSetWithTemplateKHR") : instance.getProcAddr( "vkUpdateDescriptorSetWithTemplateKHR")); + vkUpdateDescriptorSets = PFN_vkUpdateDescriptorSets(device ? device.getProcAddr( "vkUpdateDescriptorSets") : instance.getProcAddr( "vkUpdateDescriptorSets")); + vkWaitForFences = PFN_vkWaitForFences(device ? device.getProcAddr( "vkWaitForFences") : instance.getProcAddr( "vkWaitForFences")); + } + }; +} // namespace VULKAN_HPP_NAMESPACE + +#endif diff --git a/src/video/khronos/vulkan/vulkan_android.h b/src/video/khronos/vulkan/vulkan_android.h new file mode 100644 index 000000000..07aaeda28 --- /dev/null +++ b/src/video/khronos/vulkan/vulkan_android.h @@ -0,0 +1,126 @@ +#ifndef VULKAN_ANDROID_H_ +#define VULKAN_ANDROID_H_ 1 + +#ifdef __cplusplus +extern "C" { +#endif + +/* +** Copyright (c) 2015-2018 The Khronos Group Inc. +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. 
+** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#define VK_KHR_android_surface 1 +struct ANativeWindow; + +#define VK_KHR_ANDROID_SURFACE_SPEC_VERSION 6 +#define VK_KHR_ANDROID_SURFACE_EXTENSION_NAME "VK_KHR_android_surface" + +typedef VkFlags VkAndroidSurfaceCreateFlagsKHR; + +typedef struct VkAndroidSurfaceCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkAndroidSurfaceCreateFlagsKHR flags; + struct ANativeWindow* window; +} VkAndroidSurfaceCreateInfoKHR; + + +typedef VkResult (VKAPI_PTR *PFN_vkCreateAndroidSurfaceKHR)(VkInstance instance, const VkAndroidSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateAndroidSurfaceKHR( + VkInstance instance, + const VkAndroidSurfaceCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); +#endif + +#define VK_ANDROID_external_memory_android_hardware_buffer 1 +struct AHardwareBuffer; + +#define VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_SPEC_VERSION 3 +#define VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME "VK_ANDROID_external_memory_android_hardware_buffer" + +typedef struct VkAndroidHardwareBufferUsageANDROID { + VkStructureType sType; + void* pNext; + uint64_t androidHardwareBufferUsage; +} VkAndroidHardwareBufferUsageANDROID; + +typedef struct VkAndroidHardwareBufferPropertiesANDROID { + VkStructureType sType; + void* pNext; + VkDeviceSize allocationSize; + uint32_t memoryTypeBits; +} VkAndroidHardwareBufferPropertiesANDROID; + +typedef struct VkAndroidHardwareBufferFormatPropertiesANDROID { + VkStructureType sType; + void* pNext; + VkFormat format; + uint64_t externalFormat; + VkFormatFeatureFlags formatFeatures; + VkComponentMapping samplerYcbcrConversionComponents; + VkSamplerYcbcrModelConversion suggestedYcbcrModel; + VkSamplerYcbcrRange suggestedYcbcrRange; + VkChromaLocation suggestedXChromaOffset; + VkChromaLocation suggestedYChromaOffset; +} VkAndroidHardwareBufferFormatPropertiesANDROID; + +typedef struct VkImportAndroidHardwareBufferInfoANDROID { + VkStructureType sType; + const void* pNext; + struct AHardwareBuffer* buffer; +} VkImportAndroidHardwareBufferInfoANDROID; + +typedef struct VkMemoryGetAndroidHardwareBufferInfoANDROID { + VkStructureType sType; + const void* pNext; + VkDeviceMemory memory; +} VkMemoryGetAndroidHardwareBufferInfoANDROID; + +typedef struct VkExternalFormatANDROID { + VkStructureType sType; + void* pNext; + uint64_t externalFormat; +} VkExternalFormatANDROID; + + +typedef VkResult (VKAPI_PTR *PFN_vkGetAndroidHardwareBufferPropertiesANDROID)(VkDevice device, const struct AHardwareBuffer* buffer, VkAndroidHardwareBufferPropertiesANDROID* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryAndroidHardwareBufferANDROID)(VkDevice device, const VkMemoryGetAndroidHardwareBufferInfoANDROID* pInfo, struct AHardwareBuffer** pBuffer); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetAndroidHardwareBufferPropertiesANDROID( + VkDevice device, 
+ const struct AHardwareBuffer* buffer, + VkAndroidHardwareBufferPropertiesANDROID* pProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryAndroidHardwareBufferANDROID( + VkDevice device, + const VkMemoryGetAndroidHardwareBufferInfoANDROID* pInfo, + struct AHardwareBuffer** pBuffer); +#endif + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/src/video/khronos/vulkan/vulkan_core.h b/src/video/khronos/vulkan/vulkan_core.h new file mode 100644 index 000000000..4cd8ed51d --- /dev/null +++ b/src/video/khronos/vulkan/vulkan_core.h @@ -0,0 +1,8823 @@ +#ifndef VULKAN_CORE_H_ +#define VULKAN_CORE_H_ 1 + +#ifdef __cplusplus +extern "C" { +#endif + +/* +** Copyright (c) 2015-2018 The Khronos Group Inc. +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#define VK_VERSION_1_0 1 +#include "vk_platform.h" + +#define VK_MAKE_VERSION(major, minor, patch) \ + (((major) << 22) | ((minor) << 12) | (patch)) + +// DEPRECATED: This define has been removed. Specific version defines (e.g. VK_API_VERSION_1_0), or the VK_MAKE_VERSION macro, should be used instead. +//#define VK_API_VERSION VK_MAKE_VERSION(1, 0, 0) // Patch version should always be set to 0 + +// Vulkan 1.0 version number +#define VK_API_VERSION_1_0 VK_MAKE_VERSION(1, 0, 0)// Patch version should always be set to 0 + +#define VK_VERSION_MAJOR(version) ((uint32_t)(version) >> 22) +#define VK_VERSION_MINOR(version) (((uint32_t)(version) >> 12) & 0x3ff) +#define VK_VERSION_PATCH(version) ((uint32_t)(version) & 0xfff) +// Version of this file +#define VK_HEADER_VERSION 91 + + +#define VK_NULL_HANDLE 0 + + +#define VK_DEFINE_HANDLE(object) typedef struct object##_T* object; + + +#if !defined(VK_DEFINE_NON_DISPATCHABLE_HANDLE) +#if defined(__LP64__) || defined(_WIN64) || (defined(__x86_64__) && !defined(__ILP32__) ) || defined(_M_X64) || defined(__ia64) || defined (_M_IA64) || defined(__aarch64__) || defined(__powerpc64__) + #define VK_DEFINE_NON_DISPATCHABLE_HANDLE(object) typedef struct object##_T *object; +#else + #define VK_DEFINE_NON_DISPATCHABLE_HANDLE(object) typedef uint64_t object; +#endif +#endif + + +typedef uint32_t VkFlags; +typedef uint32_t VkBool32; +typedef uint64_t VkDeviceSize; +typedef uint32_t VkSampleMask; + +VK_DEFINE_HANDLE(VkInstance) +VK_DEFINE_HANDLE(VkPhysicalDevice) +VK_DEFINE_HANDLE(VkDevice) +VK_DEFINE_HANDLE(VkQueue) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSemaphore) +VK_DEFINE_HANDLE(VkCommandBuffer) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkFence) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDeviceMemory) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkBuffer) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkImage) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkEvent) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkQueryPool) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkBufferView) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkImageView) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkShaderModule) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPipelineCache) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPipelineLayout) 
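A quick aside on the version macros defined just above (an illustrative sketch only, not part of the vendored header): VK_MAKE_VERSION packs major/minor/patch into a single uint32_t with 10 bits reserved for the minor number and 12 bits for the patch number, and VK_VERSION_MAJOR/MINOR/PATCH unpack it again. A minimal C sketch exercising only those macros, assuming the vulkan_core.h added in this patch (and its vk_platform.h companion) is on the include path:

    /* Illustrative only -- exercises the VK_MAKE_VERSION / VK_VERSION_* macros above. */
    #include <assert.h>
    #include <stdint.h>
    #include "vulkan_core.h"   /* the header being added in this diff */

    int main(void) {
        uint32_t v = VK_MAKE_VERSION(1, 1, 92);        /* (1 << 22) | (1 << 12) | 92 */
        assert(VK_VERSION_MAJOR(v) == 1);              /* v >> 22          */
        assert(VK_VERSION_MINOR(v) == 1);              /* (v >> 12) & 0x3ff */
        assert(VK_VERSION_PATCH(v) == 92);             /* v & 0xfff        */
        assert(VK_API_VERSION_1_0 == VK_MAKE_VERSION(1, 0, 0));
        return 0;
    }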
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkRenderPass) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPipeline) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDescriptorSetLayout) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSampler) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDescriptorPool) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDescriptorSet) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkFramebuffer) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkCommandPool) + +#define VK_LOD_CLAMP_NONE 1000.0f +#define VK_REMAINING_MIP_LEVELS (~0U) +#define VK_REMAINING_ARRAY_LAYERS (~0U) +#define VK_WHOLE_SIZE (~0ULL) +#define VK_ATTACHMENT_UNUSED (~0U) +#define VK_TRUE 1 +#define VK_FALSE 0 +#define VK_QUEUE_FAMILY_IGNORED (~0U) +#define VK_SUBPASS_EXTERNAL (~0U) +#define VK_MAX_PHYSICAL_DEVICE_NAME_SIZE 256 +#define VK_UUID_SIZE 16 +#define VK_MAX_MEMORY_TYPES 32 +#define VK_MAX_MEMORY_HEAPS 16 +#define VK_MAX_EXTENSION_NAME_SIZE 256 +#define VK_MAX_DESCRIPTION_SIZE 256 + + +typedef enum VkPipelineCacheHeaderVersion { + VK_PIPELINE_CACHE_HEADER_VERSION_ONE = 1, + VK_PIPELINE_CACHE_HEADER_VERSION_BEGIN_RANGE = VK_PIPELINE_CACHE_HEADER_VERSION_ONE, + VK_PIPELINE_CACHE_HEADER_VERSION_END_RANGE = VK_PIPELINE_CACHE_HEADER_VERSION_ONE, + VK_PIPELINE_CACHE_HEADER_VERSION_RANGE_SIZE = (VK_PIPELINE_CACHE_HEADER_VERSION_ONE - VK_PIPELINE_CACHE_HEADER_VERSION_ONE + 1), + VK_PIPELINE_CACHE_HEADER_VERSION_MAX_ENUM = 0x7FFFFFFF +} VkPipelineCacheHeaderVersion; + +typedef enum VkResult { + VK_SUCCESS = 0, + VK_NOT_READY = 1, + VK_TIMEOUT = 2, + VK_EVENT_SET = 3, + VK_EVENT_RESET = 4, + VK_INCOMPLETE = 5, + VK_ERROR_OUT_OF_HOST_MEMORY = -1, + VK_ERROR_OUT_OF_DEVICE_MEMORY = -2, + VK_ERROR_INITIALIZATION_FAILED = -3, + VK_ERROR_DEVICE_LOST = -4, + VK_ERROR_MEMORY_MAP_FAILED = -5, + VK_ERROR_LAYER_NOT_PRESENT = -6, + VK_ERROR_EXTENSION_NOT_PRESENT = -7, + VK_ERROR_FEATURE_NOT_PRESENT = -8, + VK_ERROR_INCOMPATIBLE_DRIVER = -9, + VK_ERROR_TOO_MANY_OBJECTS = -10, + VK_ERROR_FORMAT_NOT_SUPPORTED = -11, + VK_ERROR_FRAGMENTED_POOL = -12, + VK_ERROR_OUT_OF_POOL_MEMORY = -1000069000, + VK_ERROR_INVALID_EXTERNAL_HANDLE = -1000072003, + VK_ERROR_SURFACE_LOST_KHR = -1000000000, + VK_ERROR_NATIVE_WINDOW_IN_USE_KHR = -1000000001, + VK_SUBOPTIMAL_KHR = 1000001003, + VK_ERROR_OUT_OF_DATE_KHR = -1000001004, + VK_ERROR_INCOMPATIBLE_DISPLAY_KHR = -1000003001, + VK_ERROR_VALIDATION_FAILED_EXT = -1000011001, + VK_ERROR_INVALID_SHADER_NV = -1000012000, + VK_ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT = -1000158000, + VK_ERROR_FRAGMENTATION_EXT = -1000161000, + VK_ERROR_NOT_PERMITTED_EXT = -1000174001, + VK_ERROR_OUT_OF_POOL_MEMORY_KHR = VK_ERROR_OUT_OF_POOL_MEMORY, + VK_ERROR_INVALID_EXTERNAL_HANDLE_KHR = VK_ERROR_INVALID_EXTERNAL_HANDLE, + VK_RESULT_BEGIN_RANGE = VK_ERROR_FRAGMENTED_POOL, + VK_RESULT_END_RANGE = VK_INCOMPLETE, + VK_RESULT_RANGE_SIZE = (VK_INCOMPLETE - VK_ERROR_FRAGMENTED_POOL + 1), + VK_RESULT_MAX_ENUM = 0x7FFFFFFF +} VkResult; + +typedef enum VkStructureType { + VK_STRUCTURE_TYPE_APPLICATION_INFO = 0, + VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO = 1, + VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO = 2, + VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO = 3, + VK_STRUCTURE_TYPE_SUBMIT_INFO = 4, + VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO = 5, + VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE = 6, + VK_STRUCTURE_TYPE_BIND_SPARSE_INFO = 7, + VK_STRUCTURE_TYPE_FENCE_CREATE_INFO = 8, + VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO = 9, + VK_STRUCTURE_TYPE_EVENT_CREATE_INFO = 10, + VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO = 11, + VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO = 12, + VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO 
= 13, + VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO = 14, + VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO = 15, + VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO = 16, + VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO = 17, + VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO = 18, + VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO = 19, + VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO = 20, + VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO = 21, + VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO = 22, + VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO = 23, + VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO = 24, + VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO = 25, + VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO = 26, + VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO = 27, + VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO = 28, + VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO = 29, + VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO = 30, + VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO = 31, + VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO = 32, + VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO = 33, + VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO = 34, + VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET = 35, + VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET = 36, + VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO = 37, + VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO = 38, + VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO = 39, + VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO = 40, + VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO = 41, + VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO = 42, + VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO = 43, + VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER = 44, + VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER = 45, + VK_STRUCTURE_TYPE_MEMORY_BARRIER = 46, + VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO = 47, + VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO = 48, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES = 1000094000, + VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO = 1000157000, + VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO = 1000157001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES = 1000083000, + VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS = 1000127000, + VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO = 1000127001, + VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO = 1000060000, + VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO = 1000060003, + VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO = 1000060004, + VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO = 1000060005, + VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO = 1000060006, + VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO = 1000060013, + VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO = 1000060014, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES = 1000070000, + VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO = 1000070001, + VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2 = 1000146000, + VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2 = 1000146001, + VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2 = 1000146002, + VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2 = 1000146003, + VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2 = 1000146004, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2 = 1000059000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2 = 1000059001, + VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2 = 1000059002, + VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2 = 1000059003, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2 = 
1000059004, + VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2 = 1000059005, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2 = 1000059006, + VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2 = 1000059007, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2 = 1000059008, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES = 1000117000, + VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO = 1000117001, + VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO = 1000117002, + VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO = 1000117003, + VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO = 1000053000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES = 1000053001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES = 1000053002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES = 1000120000, + VK_STRUCTURE_TYPE_PROTECTED_SUBMIT_INFO = 1000145000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES = 1000145001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_PROPERTIES = 1000145002, + VK_STRUCTURE_TYPE_DEVICE_QUEUE_INFO_2 = 1000145003, + VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO = 1000156000, + VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO = 1000156001, + VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO = 1000156002, + VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO = 1000156003, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES = 1000156004, + VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES = 1000156005, + VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO = 1000085000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO = 1000071000, + VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES = 1000071001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO = 1000071002, + VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES = 1000071003, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES = 1000071004, + VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO = 1000072000, + VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO = 1000072001, + VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO = 1000072002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO = 1000112000, + VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES = 1000112001, + VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO = 1000113000, + VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO = 1000077000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO = 1000076000, + VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES = 1000076001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES = 1000168000, + VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT = 1000168001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETER_FEATURES = 1000063000, + VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR = 1000001000, + VK_STRUCTURE_TYPE_PRESENT_INFO_KHR = 1000001001, + VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_CAPABILITIES_KHR = 1000060007, + VK_STRUCTURE_TYPE_IMAGE_SWAPCHAIN_CREATE_INFO_KHR = 1000060008, + VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR = 1000060009, + VK_STRUCTURE_TYPE_ACQUIRE_NEXT_IMAGE_INFO_KHR = 1000060010, + VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_INFO_KHR = 1000060011, + VK_STRUCTURE_TYPE_DEVICE_GROUP_SWAPCHAIN_CREATE_INFO_KHR = 1000060012, + VK_STRUCTURE_TYPE_DISPLAY_MODE_CREATE_INFO_KHR = 1000002000, + VK_STRUCTURE_TYPE_DISPLAY_SURFACE_CREATE_INFO_KHR = 1000002001, + VK_STRUCTURE_TYPE_DISPLAY_PRESENT_INFO_KHR = 1000003000, + 
VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR = 1000004000, + VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR = 1000005000, + VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR = 1000006000, + VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR = 1000008000, + VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR = 1000009000, + VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT = 1000011000, + VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_RASTERIZATION_ORDER_AMD = 1000018000, + VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_NAME_INFO_EXT = 1000022000, + VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_TAG_INFO_EXT = 1000022001, + VK_STRUCTURE_TYPE_DEBUG_MARKER_MARKER_INFO_EXT = 1000022002, + VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_IMAGE_CREATE_INFO_NV = 1000026000, + VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV = 1000026001, + VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV = 1000026002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT = 1000028000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_PROPERTIES_EXT = 1000028001, + VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_STREAM_CREATE_INFO_EXT = 1000028002, + VK_STRUCTURE_TYPE_TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD = 1000041000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CORNER_SAMPLED_IMAGE_FEATURES_NV = 1000050000, + VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_NV = 1000056000, + VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_NV = 1000056001, + VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_NV = 1000057000, + VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_NV = 1000057001, + VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_NV = 1000058000, + VK_STRUCTURE_TYPE_VALIDATION_FLAGS_EXT = 1000061000, + VK_STRUCTURE_TYPE_VI_SURFACE_CREATE_INFO_NN = 1000062000, + VK_STRUCTURE_TYPE_IMAGE_VIEW_ASTC_DECODE_MODE_EXT = 1000067000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ASTC_DECODE_FEATURES_EXT = 1000067001, + VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR = 1000073000, + VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_KHR = 1000073001, + VK_STRUCTURE_TYPE_MEMORY_WIN32_HANDLE_PROPERTIES_KHR = 1000073002, + VK_STRUCTURE_TYPE_MEMORY_GET_WIN32_HANDLE_INFO_KHR = 1000073003, + VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR = 1000074000, + VK_STRUCTURE_TYPE_MEMORY_FD_PROPERTIES_KHR = 1000074001, + VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR = 1000074002, + VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_KHR = 1000075000, + VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR = 1000078000, + VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR = 1000078001, + VK_STRUCTURE_TYPE_D3D12_FENCE_SUBMIT_INFO_KHR = 1000078002, + VK_STRUCTURE_TYPE_SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR = 1000078003, + VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR = 1000079000, + VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR = 1000079001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR = 1000080000, + VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_CONDITIONAL_RENDERING_INFO_EXT = 1000081000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT = 1000081001, + VK_STRUCTURE_TYPE_CONDITIONAL_RENDERING_BEGIN_INFO_EXT = 1000081002, + VK_STRUCTURE_TYPE_PRESENT_REGIONS_KHR = 1000084000, + VK_STRUCTURE_TYPE_OBJECT_TABLE_CREATE_INFO_NVX = 1000086000, + VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NVX = 1000086001, + VK_STRUCTURE_TYPE_CMD_PROCESS_COMMANDS_INFO_NVX = 1000086002, + VK_STRUCTURE_TYPE_CMD_RESERVE_SPACE_FOR_COMMANDS_INFO_NVX = 1000086003, + 
VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_LIMITS_NVX = 1000086004, + VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_FEATURES_NVX = 1000086005, + VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_W_SCALING_STATE_CREATE_INFO_NV = 1000087000, + VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_EXT = 1000090000, + VK_STRUCTURE_TYPE_DISPLAY_POWER_INFO_EXT = 1000091000, + VK_STRUCTURE_TYPE_DEVICE_EVENT_INFO_EXT = 1000091001, + VK_STRUCTURE_TYPE_DISPLAY_EVENT_INFO_EXT = 1000091002, + VK_STRUCTURE_TYPE_SWAPCHAIN_COUNTER_CREATE_INFO_EXT = 1000091003, + VK_STRUCTURE_TYPE_PRESENT_TIMES_INFO_GOOGLE = 1000092000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX = 1000097000, + VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV = 1000098000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISCARD_RECTANGLE_PROPERTIES_EXT = 1000099000, + VK_STRUCTURE_TYPE_PIPELINE_DISCARD_RECTANGLE_STATE_CREATE_INFO_EXT = 1000099001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONSERVATIVE_RASTERIZATION_PROPERTIES_EXT = 1000101000, + VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_CONSERVATIVE_STATE_CREATE_INFO_EXT = 1000101001, + VK_STRUCTURE_TYPE_HDR_METADATA_EXT = 1000105000, + VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2_KHR = 1000109000, + VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2_KHR = 1000109001, + VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2_KHR = 1000109002, + VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2_KHR = 1000109003, + VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2_KHR = 1000109004, + VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO_KHR = 1000109005, + VK_STRUCTURE_TYPE_SUBPASS_END_INFO_KHR = 1000109006, + VK_STRUCTURE_TYPE_SHARED_PRESENT_SURFACE_CAPABILITIES_KHR = 1000111000, + VK_STRUCTURE_TYPE_IMPORT_FENCE_WIN32_HANDLE_INFO_KHR = 1000114000, + VK_STRUCTURE_TYPE_EXPORT_FENCE_WIN32_HANDLE_INFO_KHR = 1000114001, + VK_STRUCTURE_TYPE_FENCE_GET_WIN32_HANDLE_INFO_KHR = 1000114002, + VK_STRUCTURE_TYPE_IMPORT_FENCE_FD_INFO_KHR = 1000115000, + VK_STRUCTURE_TYPE_FENCE_GET_FD_INFO_KHR = 1000115001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR = 1000119000, + VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_KHR = 1000119001, + VK_STRUCTURE_TYPE_SURFACE_FORMAT_2_KHR = 1000119002, + VK_STRUCTURE_TYPE_DISPLAY_PROPERTIES_2_KHR = 1000121000, + VK_STRUCTURE_TYPE_DISPLAY_PLANE_PROPERTIES_2_KHR = 1000121001, + VK_STRUCTURE_TYPE_DISPLAY_MODE_PROPERTIES_2_KHR = 1000121002, + VK_STRUCTURE_TYPE_DISPLAY_PLANE_INFO_2_KHR = 1000121003, + VK_STRUCTURE_TYPE_DISPLAY_PLANE_CAPABILITIES_2_KHR = 1000121004, + VK_STRUCTURE_TYPE_IOS_SURFACE_CREATE_INFO_MVK = 1000122000, + VK_STRUCTURE_TYPE_MACOS_SURFACE_CREATE_INFO_MVK = 1000123000, + VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT = 1000128000, + VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_TAG_INFO_EXT = 1000128001, + VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT = 1000128002, + VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CALLBACK_DATA_EXT = 1000128003, + VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT = 1000128004, + VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_USAGE_ANDROID = 1000129000, + VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID = 1000129001, + VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID = 1000129002, + VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID = 1000129003, + VK_STRUCTURE_TYPE_MEMORY_GET_ANDROID_HARDWARE_BUFFER_INFO_ANDROID = 1000129004, + VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID = 1000129005, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES_EXT = 1000130000, + VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO_EXT = 1000130001, + 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES_EXT = 1000138000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES_EXT = 1000138001, + VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK_EXT = 1000138002, + VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO_EXT = 1000138003, + VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT = 1000143000, + VK_STRUCTURE_TYPE_RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT = 1000143001, + VK_STRUCTURE_TYPE_PIPELINE_SAMPLE_LOCATIONS_STATE_CREATE_INFO_EXT = 1000143002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLE_LOCATIONS_PROPERTIES_EXT = 1000143003, + VK_STRUCTURE_TYPE_MULTISAMPLE_PROPERTIES_EXT = 1000143004, + VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR = 1000147000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT = 1000148000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT = 1000148001, + VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_ADVANCED_STATE_CREATE_INFO_EXT = 1000148002, + VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_TO_COLOR_STATE_CREATE_INFO_NV = 1000149000, + VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV = 1000152000, + VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT = 1000158000, + VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT = 1000158001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT = 1000158002, + VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_LIST_CREATE_INFO_EXT = 1000158003, + VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_EXPLICIT_CREATE_INFO_EXT = 1000158004, + VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT = 1000158005, + VK_STRUCTURE_TYPE_VALIDATION_CACHE_CREATE_INFO_EXT = 1000160000, + VK_STRUCTURE_TYPE_SHADER_MODULE_VALIDATION_CACHE_CREATE_INFO_EXT = 1000160001, + VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT = 1000161000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES_EXT = 1000161001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES_EXT = 1000161002, + VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT = 1000161003, + VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT_EXT = 1000161004, + VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SHADING_RATE_IMAGE_STATE_CREATE_INFO_NV = 1000164000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_FEATURES_NV = 1000164001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_PROPERTIES_NV = 1000164002, + VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_COARSE_SAMPLE_ORDER_STATE_CREATE_INFO_NV = 1000164005, + VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_NV = 1000165000, + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_NV = 1000165001, + VK_STRUCTURE_TYPE_GEOMETRY_NV = 1000165003, + VK_STRUCTURE_TYPE_GEOMETRY_TRIANGLES_NV = 1000165004, + VK_STRUCTURE_TYPE_GEOMETRY_AABB_NV = 1000165005, + VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_NV = 1000165006, + VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV = 1000165007, + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV = 1000165008, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PROPERTIES_NV = 1000165009, + VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV = 1000165011, + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_INFO_NV = 1000165012, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_REPRESENTATIVE_FRAGMENT_TEST_FEATURES_NV = 1000166000, + VK_STRUCTURE_TYPE_PIPELINE_REPRESENTATIVE_FRAGMENT_TEST_STATE_CREATE_INFO_NV = 1000166001, + 
VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_EXT = 1000174000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES_KHR = 1000177000, + VK_STRUCTURE_TYPE_IMPORT_MEMORY_HOST_POINTER_INFO_EXT = 1000178000, + VK_STRUCTURE_TYPE_MEMORY_HOST_POINTER_PROPERTIES_EXT = 1000178001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT = 1000178002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES_KHR = 1000180000, + VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_EXT = 1000184000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_AMD = 1000185000, + VK_STRUCTURE_TYPE_DEVICE_MEMORY_OVERALLOCATION_CREATE_INFO_AMD = 1000189000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT = 1000190000, + VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT = 1000190001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT = 1000190002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES_KHR = 1000196000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_NV = 1000201000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_FEATURES_NV = 1000202000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_NV = 1000202001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_NV = 1000203000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_FOOTPRINT_FEATURES_NV = 1000204000, + VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_EXCLUSIVE_SCISSOR_STATE_CREATE_INFO_NV = 1000205000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXCLUSIVE_SCISSOR_FEATURES_NV = 1000205002, + VK_STRUCTURE_TYPE_CHECKPOINT_DATA_NV = 1000206000, + VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_NV = 1000206001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES_KHR = 1000211000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PCI_BUS_INFO_PROPERTIES_EXT = 1000212000, + VK_STRUCTURE_TYPE_IMAGEPIPE_SURFACE_CREATE_INFO_FUCHSIA = 1000214000, + VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT, + VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2, + VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2_KHR = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2, + VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2_KHR = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2, + VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2_KHR = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2, + VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2_KHR = VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2, + VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHR = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO, + VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO_KHR = 
VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO, + VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO_KHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO, + VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO_KHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO, + VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO_KHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO, + VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO_KHR = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO, + VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO_KHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES, + VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO, + VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES_KHR = VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO, + VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES_KHR = VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES, + VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO, + VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO, + VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_KHR = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO, + VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES_KHR = VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES, + VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES, + VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO, + VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES2_EXT = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_EXT, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO, + VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES_KHR = VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES, + VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES, + VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO, + VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO, + VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES, + VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS, + VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR = 
VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO, + VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR = VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2, + VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2, + VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2_KHR = VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2, + VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2, + VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2_KHR = VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2, + VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO, + VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO_KHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO, + VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO_KHR = VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO, + VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO_KHR = VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES, + VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES_KHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES, + VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO_KHR = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO, + VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES, + VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT_KHR = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT, + VK_STRUCTURE_TYPE_BEGIN_RANGE = VK_STRUCTURE_TYPE_APPLICATION_INFO, + VK_STRUCTURE_TYPE_END_RANGE = VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO, + VK_STRUCTURE_TYPE_RANGE_SIZE = (VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO - VK_STRUCTURE_TYPE_APPLICATION_INFO + 1), + VK_STRUCTURE_TYPE_MAX_ENUM = 0x7FFFFFFF +} VkStructureType; + +typedef enum VkSystemAllocationScope { + VK_SYSTEM_ALLOCATION_SCOPE_COMMAND = 0, + VK_SYSTEM_ALLOCATION_SCOPE_OBJECT = 1, + VK_SYSTEM_ALLOCATION_SCOPE_CACHE = 2, + VK_SYSTEM_ALLOCATION_SCOPE_DEVICE = 3, + VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE = 4, + VK_SYSTEM_ALLOCATION_SCOPE_BEGIN_RANGE = VK_SYSTEM_ALLOCATION_SCOPE_COMMAND, + VK_SYSTEM_ALLOCATION_SCOPE_END_RANGE = VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE, + VK_SYSTEM_ALLOCATION_SCOPE_RANGE_SIZE = (VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE - VK_SYSTEM_ALLOCATION_SCOPE_COMMAND + 1), + VK_SYSTEM_ALLOCATION_SCOPE_MAX_ENUM = 0x7FFFFFFF +} VkSystemAllocationScope; + +typedef enum VkInternalAllocationType { + VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE = 0, + VK_INTERNAL_ALLOCATION_TYPE_BEGIN_RANGE = VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE, + VK_INTERNAL_ALLOCATION_TYPE_END_RANGE = VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE, + VK_INTERNAL_ALLOCATION_TYPE_RANGE_SIZE = (VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE - VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE + 1), + VK_INTERNAL_ALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF +} VkInternalAllocationType; + +typedef enum VkFormat { + VK_FORMAT_UNDEFINED = 0, + VK_FORMAT_R4G4_UNORM_PACK8 = 1, + VK_FORMAT_R4G4B4A4_UNORM_PACK16 = 2, + VK_FORMAT_B4G4R4A4_UNORM_PACK16 = 3, + VK_FORMAT_R5G6B5_UNORM_PACK16 = 4, + VK_FORMAT_B5G6R5_UNORM_PACK16 = 5, + VK_FORMAT_R5G5B5A1_UNORM_PACK16 = 6, + VK_FORMAT_B5G5R5A1_UNORM_PACK16 = 7, + VK_FORMAT_A1R5G5B5_UNORM_PACK16 = 8, + VK_FORMAT_R8_UNORM = 9, + 
VK_FORMAT_R8_SNORM = 10, + VK_FORMAT_R8_USCALED = 11, + VK_FORMAT_R8_SSCALED = 12, + VK_FORMAT_R8_UINT = 13, + VK_FORMAT_R8_SINT = 14, + VK_FORMAT_R8_SRGB = 15, + VK_FORMAT_R8G8_UNORM = 16, + VK_FORMAT_R8G8_SNORM = 17, + VK_FORMAT_R8G8_USCALED = 18, + VK_FORMAT_R8G8_SSCALED = 19, + VK_FORMAT_R8G8_UINT = 20, + VK_FORMAT_R8G8_SINT = 21, + VK_FORMAT_R8G8_SRGB = 22, + VK_FORMAT_R8G8B8_UNORM = 23, + VK_FORMAT_R8G8B8_SNORM = 24, + VK_FORMAT_R8G8B8_USCALED = 25, + VK_FORMAT_R8G8B8_SSCALED = 26, + VK_FORMAT_R8G8B8_UINT = 27, + VK_FORMAT_R8G8B8_SINT = 28, + VK_FORMAT_R8G8B8_SRGB = 29, + VK_FORMAT_B8G8R8_UNORM = 30, + VK_FORMAT_B8G8R8_SNORM = 31, + VK_FORMAT_B8G8R8_USCALED = 32, + VK_FORMAT_B8G8R8_SSCALED = 33, + VK_FORMAT_B8G8R8_UINT = 34, + VK_FORMAT_B8G8R8_SINT = 35, + VK_FORMAT_B8G8R8_SRGB = 36, + VK_FORMAT_R8G8B8A8_UNORM = 37, + VK_FORMAT_R8G8B8A8_SNORM = 38, + VK_FORMAT_R8G8B8A8_USCALED = 39, + VK_FORMAT_R8G8B8A8_SSCALED = 40, + VK_FORMAT_R8G8B8A8_UINT = 41, + VK_FORMAT_R8G8B8A8_SINT = 42, + VK_FORMAT_R8G8B8A8_SRGB = 43, + VK_FORMAT_B8G8R8A8_UNORM = 44, + VK_FORMAT_B8G8R8A8_SNORM = 45, + VK_FORMAT_B8G8R8A8_USCALED = 46, + VK_FORMAT_B8G8R8A8_SSCALED = 47, + VK_FORMAT_B8G8R8A8_UINT = 48, + VK_FORMAT_B8G8R8A8_SINT = 49, + VK_FORMAT_B8G8R8A8_SRGB = 50, + VK_FORMAT_A8B8G8R8_UNORM_PACK32 = 51, + VK_FORMAT_A8B8G8R8_SNORM_PACK32 = 52, + VK_FORMAT_A8B8G8R8_USCALED_PACK32 = 53, + VK_FORMAT_A8B8G8R8_SSCALED_PACK32 = 54, + VK_FORMAT_A8B8G8R8_UINT_PACK32 = 55, + VK_FORMAT_A8B8G8R8_SINT_PACK32 = 56, + VK_FORMAT_A8B8G8R8_SRGB_PACK32 = 57, + VK_FORMAT_A2R10G10B10_UNORM_PACK32 = 58, + VK_FORMAT_A2R10G10B10_SNORM_PACK32 = 59, + VK_FORMAT_A2R10G10B10_USCALED_PACK32 = 60, + VK_FORMAT_A2R10G10B10_SSCALED_PACK32 = 61, + VK_FORMAT_A2R10G10B10_UINT_PACK32 = 62, + VK_FORMAT_A2R10G10B10_SINT_PACK32 = 63, + VK_FORMAT_A2B10G10R10_UNORM_PACK32 = 64, + VK_FORMAT_A2B10G10R10_SNORM_PACK32 = 65, + VK_FORMAT_A2B10G10R10_USCALED_PACK32 = 66, + VK_FORMAT_A2B10G10R10_SSCALED_PACK32 = 67, + VK_FORMAT_A2B10G10R10_UINT_PACK32 = 68, + VK_FORMAT_A2B10G10R10_SINT_PACK32 = 69, + VK_FORMAT_R16_UNORM = 70, + VK_FORMAT_R16_SNORM = 71, + VK_FORMAT_R16_USCALED = 72, + VK_FORMAT_R16_SSCALED = 73, + VK_FORMAT_R16_UINT = 74, + VK_FORMAT_R16_SINT = 75, + VK_FORMAT_R16_SFLOAT = 76, + VK_FORMAT_R16G16_UNORM = 77, + VK_FORMAT_R16G16_SNORM = 78, + VK_FORMAT_R16G16_USCALED = 79, + VK_FORMAT_R16G16_SSCALED = 80, + VK_FORMAT_R16G16_UINT = 81, + VK_FORMAT_R16G16_SINT = 82, + VK_FORMAT_R16G16_SFLOAT = 83, + VK_FORMAT_R16G16B16_UNORM = 84, + VK_FORMAT_R16G16B16_SNORM = 85, + VK_FORMAT_R16G16B16_USCALED = 86, + VK_FORMAT_R16G16B16_SSCALED = 87, + VK_FORMAT_R16G16B16_UINT = 88, + VK_FORMAT_R16G16B16_SINT = 89, + VK_FORMAT_R16G16B16_SFLOAT = 90, + VK_FORMAT_R16G16B16A16_UNORM = 91, + VK_FORMAT_R16G16B16A16_SNORM = 92, + VK_FORMAT_R16G16B16A16_USCALED = 93, + VK_FORMAT_R16G16B16A16_SSCALED = 94, + VK_FORMAT_R16G16B16A16_UINT = 95, + VK_FORMAT_R16G16B16A16_SINT = 96, + VK_FORMAT_R16G16B16A16_SFLOAT = 97, + VK_FORMAT_R32_UINT = 98, + VK_FORMAT_R32_SINT = 99, + VK_FORMAT_R32_SFLOAT = 100, + VK_FORMAT_R32G32_UINT = 101, + VK_FORMAT_R32G32_SINT = 102, + VK_FORMAT_R32G32_SFLOAT = 103, + VK_FORMAT_R32G32B32_UINT = 104, + VK_FORMAT_R32G32B32_SINT = 105, + VK_FORMAT_R32G32B32_SFLOAT = 106, + VK_FORMAT_R32G32B32A32_UINT = 107, + VK_FORMAT_R32G32B32A32_SINT = 108, + VK_FORMAT_R32G32B32A32_SFLOAT = 109, + VK_FORMAT_R64_UINT = 110, + VK_FORMAT_R64_SINT = 111, + VK_FORMAT_R64_SFLOAT = 112, + VK_FORMAT_R64G64_UINT = 113, + VK_FORMAT_R64G64_SINT = 114, + 
VK_FORMAT_R64G64_SFLOAT = 115, + VK_FORMAT_R64G64B64_UINT = 116, + VK_FORMAT_R64G64B64_SINT = 117, + VK_FORMAT_R64G64B64_SFLOAT = 118, + VK_FORMAT_R64G64B64A64_UINT = 119, + VK_FORMAT_R64G64B64A64_SINT = 120, + VK_FORMAT_R64G64B64A64_SFLOAT = 121, + VK_FORMAT_B10G11R11_UFLOAT_PACK32 = 122, + VK_FORMAT_E5B9G9R9_UFLOAT_PACK32 = 123, + VK_FORMAT_D16_UNORM = 124, + VK_FORMAT_X8_D24_UNORM_PACK32 = 125, + VK_FORMAT_D32_SFLOAT = 126, + VK_FORMAT_S8_UINT = 127, + VK_FORMAT_D16_UNORM_S8_UINT = 128, + VK_FORMAT_D24_UNORM_S8_UINT = 129, + VK_FORMAT_D32_SFLOAT_S8_UINT = 130, + VK_FORMAT_BC1_RGB_UNORM_BLOCK = 131, + VK_FORMAT_BC1_RGB_SRGB_BLOCK = 132, + VK_FORMAT_BC1_RGBA_UNORM_BLOCK = 133, + VK_FORMAT_BC1_RGBA_SRGB_BLOCK = 134, + VK_FORMAT_BC2_UNORM_BLOCK = 135, + VK_FORMAT_BC2_SRGB_BLOCK = 136, + VK_FORMAT_BC3_UNORM_BLOCK = 137, + VK_FORMAT_BC3_SRGB_BLOCK = 138, + VK_FORMAT_BC4_UNORM_BLOCK = 139, + VK_FORMAT_BC4_SNORM_BLOCK = 140, + VK_FORMAT_BC5_UNORM_BLOCK = 141, + VK_FORMAT_BC5_SNORM_BLOCK = 142, + VK_FORMAT_BC6H_UFLOAT_BLOCK = 143, + VK_FORMAT_BC6H_SFLOAT_BLOCK = 144, + VK_FORMAT_BC7_UNORM_BLOCK = 145, + VK_FORMAT_BC7_SRGB_BLOCK = 146, + VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK = 147, + VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK = 148, + VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK = 149, + VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK = 150, + VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK = 151, + VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK = 152, + VK_FORMAT_EAC_R11_UNORM_BLOCK = 153, + VK_FORMAT_EAC_R11_SNORM_BLOCK = 154, + VK_FORMAT_EAC_R11G11_UNORM_BLOCK = 155, + VK_FORMAT_EAC_R11G11_SNORM_BLOCK = 156, + VK_FORMAT_ASTC_4x4_UNORM_BLOCK = 157, + VK_FORMAT_ASTC_4x4_SRGB_BLOCK = 158, + VK_FORMAT_ASTC_5x4_UNORM_BLOCK = 159, + VK_FORMAT_ASTC_5x4_SRGB_BLOCK = 160, + VK_FORMAT_ASTC_5x5_UNORM_BLOCK = 161, + VK_FORMAT_ASTC_5x5_SRGB_BLOCK = 162, + VK_FORMAT_ASTC_6x5_UNORM_BLOCK = 163, + VK_FORMAT_ASTC_6x5_SRGB_BLOCK = 164, + VK_FORMAT_ASTC_6x6_UNORM_BLOCK = 165, + VK_FORMAT_ASTC_6x6_SRGB_BLOCK = 166, + VK_FORMAT_ASTC_8x5_UNORM_BLOCK = 167, + VK_FORMAT_ASTC_8x5_SRGB_BLOCK = 168, + VK_FORMAT_ASTC_8x6_UNORM_BLOCK = 169, + VK_FORMAT_ASTC_8x6_SRGB_BLOCK = 170, + VK_FORMAT_ASTC_8x8_UNORM_BLOCK = 171, + VK_FORMAT_ASTC_8x8_SRGB_BLOCK = 172, + VK_FORMAT_ASTC_10x5_UNORM_BLOCK = 173, + VK_FORMAT_ASTC_10x5_SRGB_BLOCK = 174, + VK_FORMAT_ASTC_10x6_UNORM_BLOCK = 175, + VK_FORMAT_ASTC_10x6_SRGB_BLOCK = 176, + VK_FORMAT_ASTC_10x8_UNORM_BLOCK = 177, + VK_FORMAT_ASTC_10x8_SRGB_BLOCK = 178, + VK_FORMAT_ASTC_10x10_UNORM_BLOCK = 179, + VK_FORMAT_ASTC_10x10_SRGB_BLOCK = 180, + VK_FORMAT_ASTC_12x10_UNORM_BLOCK = 181, + VK_FORMAT_ASTC_12x10_SRGB_BLOCK = 182, + VK_FORMAT_ASTC_12x12_UNORM_BLOCK = 183, + VK_FORMAT_ASTC_12x12_SRGB_BLOCK = 184, + VK_FORMAT_G8B8G8R8_422_UNORM = 1000156000, + VK_FORMAT_B8G8R8G8_422_UNORM = 1000156001, + VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM = 1000156002, + VK_FORMAT_G8_B8R8_2PLANE_420_UNORM = 1000156003, + VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM = 1000156004, + VK_FORMAT_G8_B8R8_2PLANE_422_UNORM = 1000156005, + VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM = 1000156006, + VK_FORMAT_R10X6_UNORM_PACK16 = 1000156007, + VK_FORMAT_R10X6G10X6_UNORM_2PACK16 = 1000156008, + VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16 = 1000156009, + VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16 = 1000156010, + VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16 = 1000156011, + VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16 = 1000156012, + VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16 = 1000156013, + VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16 = 1000156014, + 
VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16 = 1000156015, + VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16 = 1000156016, + VK_FORMAT_R12X4_UNORM_PACK16 = 1000156017, + VK_FORMAT_R12X4G12X4_UNORM_2PACK16 = 1000156018, + VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16 = 1000156019, + VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16 = 1000156020, + VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16 = 1000156021, + VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16 = 1000156022, + VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16 = 1000156023, + VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16 = 1000156024, + VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16 = 1000156025, + VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16 = 1000156026, + VK_FORMAT_G16B16G16R16_422_UNORM = 1000156027, + VK_FORMAT_B16G16R16G16_422_UNORM = 1000156028, + VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM = 1000156029, + VK_FORMAT_G16_B16R16_2PLANE_420_UNORM = 1000156030, + VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM = 1000156031, + VK_FORMAT_G16_B16R16_2PLANE_422_UNORM = 1000156032, + VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM = 1000156033, + VK_FORMAT_PVRTC1_2BPP_UNORM_BLOCK_IMG = 1000054000, + VK_FORMAT_PVRTC1_4BPP_UNORM_BLOCK_IMG = 1000054001, + VK_FORMAT_PVRTC2_2BPP_UNORM_BLOCK_IMG = 1000054002, + VK_FORMAT_PVRTC2_4BPP_UNORM_BLOCK_IMG = 1000054003, + VK_FORMAT_PVRTC1_2BPP_SRGB_BLOCK_IMG = 1000054004, + VK_FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG = 1000054005, + VK_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG = 1000054006, + VK_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG = 1000054007, + VK_FORMAT_G8B8G8R8_422_UNORM_KHR = VK_FORMAT_G8B8G8R8_422_UNORM, + VK_FORMAT_B8G8R8G8_422_UNORM_KHR = VK_FORMAT_B8G8R8G8_422_UNORM, + VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM_KHR = VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM, + VK_FORMAT_G8_B8R8_2PLANE_420_UNORM_KHR = VK_FORMAT_G8_B8R8_2PLANE_420_UNORM, + VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM_KHR = VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM, + VK_FORMAT_G8_B8R8_2PLANE_422_UNORM_KHR = VK_FORMAT_G8_B8R8_2PLANE_422_UNORM, + VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM_KHR = VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM, + VK_FORMAT_R10X6_UNORM_PACK16_KHR = VK_FORMAT_R10X6_UNORM_PACK16, + VK_FORMAT_R10X6G10X6_UNORM_2PACK16_KHR = VK_FORMAT_R10X6G10X6_UNORM_2PACK16, + VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16_KHR = VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16, + VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16_KHR = VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16, + VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16_KHR = VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16, + VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16_KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16, + VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16_KHR = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16, + VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16_KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16, + VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16_KHR = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16, + VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16_KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16, + VK_FORMAT_R12X4_UNORM_PACK16_KHR = VK_FORMAT_R12X4_UNORM_PACK16, + VK_FORMAT_R12X4G12X4_UNORM_2PACK16_KHR = VK_FORMAT_R12X4G12X4_UNORM_2PACK16, + VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16_KHR = VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16, + VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16_KHR = VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16, + VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16_KHR = 
VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16, + VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16_KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16, + VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16_KHR = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16, + VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16_KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16, + VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16_KHR = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16, + VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16_KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16, + VK_FORMAT_G16B16G16R16_422_UNORM_KHR = VK_FORMAT_G16B16G16R16_422_UNORM, + VK_FORMAT_B16G16R16G16_422_UNORM_KHR = VK_FORMAT_B16G16R16G16_422_UNORM, + VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM_KHR = VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM, + VK_FORMAT_G16_B16R16_2PLANE_420_UNORM_KHR = VK_FORMAT_G16_B16R16_2PLANE_420_UNORM, + VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM_KHR = VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM, + VK_FORMAT_G16_B16R16_2PLANE_422_UNORM_KHR = VK_FORMAT_G16_B16R16_2PLANE_422_UNORM, + VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM_KHR = VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM, + VK_FORMAT_BEGIN_RANGE = VK_FORMAT_UNDEFINED, + VK_FORMAT_END_RANGE = VK_FORMAT_ASTC_12x12_SRGB_BLOCK, + VK_FORMAT_RANGE_SIZE = (VK_FORMAT_ASTC_12x12_SRGB_BLOCK - VK_FORMAT_UNDEFINED + 1), + VK_FORMAT_MAX_ENUM = 0x7FFFFFFF +} VkFormat; + +typedef enum VkImageType { + VK_IMAGE_TYPE_1D = 0, + VK_IMAGE_TYPE_2D = 1, + VK_IMAGE_TYPE_3D = 2, + VK_IMAGE_TYPE_BEGIN_RANGE = VK_IMAGE_TYPE_1D, + VK_IMAGE_TYPE_END_RANGE = VK_IMAGE_TYPE_3D, + VK_IMAGE_TYPE_RANGE_SIZE = (VK_IMAGE_TYPE_3D - VK_IMAGE_TYPE_1D + 1), + VK_IMAGE_TYPE_MAX_ENUM = 0x7FFFFFFF +} VkImageType; + +typedef enum VkImageTiling { + VK_IMAGE_TILING_OPTIMAL = 0, + VK_IMAGE_TILING_LINEAR = 1, + VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT = 1000158000, + VK_IMAGE_TILING_BEGIN_RANGE = VK_IMAGE_TILING_OPTIMAL, + VK_IMAGE_TILING_END_RANGE = VK_IMAGE_TILING_LINEAR, + VK_IMAGE_TILING_RANGE_SIZE = (VK_IMAGE_TILING_LINEAR - VK_IMAGE_TILING_OPTIMAL + 1), + VK_IMAGE_TILING_MAX_ENUM = 0x7FFFFFFF +} VkImageTiling; + +typedef enum VkPhysicalDeviceType { + VK_PHYSICAL_DEVICE_TYPE_OTHER = 0, + VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU = 1, + VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU = 2, + VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU = 3, + VK_PHYSICAL_DEVICE_TYPE_CPU = 4, + VK_PHYSICAL_DEVICE_TYPE_BEGIN_RANGE = VK_PHYSICAL_DEVICE_TYPE_OTHER, + VK_PHYSICAL_DEVICE_TYPE_END_RANGE = VK_PHYSICAL_DEVICE_TYPE_CPU, + VK_PHYSICAL_DEVICE_TYPE_RANGE_SIZE = (VK_PHYSICAL_DEVICE_TYPE_CPU - VK_PHYSICAL_DEVICE_TYPE_OTHER + 1), + VK_PHYSICAL_DEVICE_TYPE_MAX_ENUM = 0x7FFFFFFF +} VkPhysicalDeviceType; + +typedef enum VkQueryType { + VK_QUERY_TYPE_OCCLUSION = 0, + VK_QUERY_TYPE_PIPELINE_STATISTICS = 1, + VK_QUERY_TYPE_TIMESTAMP = 2, + VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT = 1000028004, + VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_NV = 1000165000, + VK_QUERY_TYPE_BEGIN_RANGE = VK_QUERY_TYPE_OCCLUSION, + VK_QUERY_TYPE_END_RANGE = VK_QUERY_TYPE_TIMESTAMP, + VK_QUERY_TYPE_RANGE_SIZE = (VK_QUERY_TYPE_TIMESTAMP - VK_QUERY_TYPE_OCCLUSION + 1), + VK_QUERY_TYPE_MAX_ENUM = 0x7FFFFFFF +} VkQueryType; + +typedef enum VkSharingMode { + VK_SHARING_MODE_EXCLUSIVE = 0, + VK_SHARING_MODE_CONCURRENT = 1, + VK_SHARING_MODE_BEGIN_RANGE = VK_SHARING_MODE_EXCLUSIVE, + VK_SHARING_MODE_END_RANGE = VK_SHARING_MODE_CONCURRENT, + VK_SHARING_MODE_RANGE_SIZE = (VK_SHARING_MODE_CONCURRENT - VK_SHARING_MODE_EXCLUSIVE + 1), + 
VK_SHARING_MODE_MAX_ENUM = 0x7FFFFFFF +} VkSharingMode; + +typedef enum VkImageLayout { + VK_IMAGE_LAYOUT_UNDEFINED = 0, + VK_IMAGE_LAYOUT_GENERAL = 1, + VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL = 2, + VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL = 3, + VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL = 4, + VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL = 5, + VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL = 6, + VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL = 7, + VK_IMAGE_LAYOUT_PREINITIALIZED = 8, + VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL = 1000117000, + VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL = 1000117001, + VK_IMAGE_LAYOUT_PRESENT_SRC_KHR = 1000001002, + VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR = 1000111000, + VK_IMAGE_LAYOUT_SHADING_RATE_OPTIMAL_NV = 1000164003, + VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL_KHR = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL, + VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL_KHR = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL, + VK_IMAGE_LAYOUT_BEGIN_RANGE = VK_IMAGE_LAYOUT_UNDEFINED, + VK_IMAGE_LAYOUT_END_RANGE = VK_IMAGE_LAYOUT_PREINITIALIZED, + VK_IMAGE_LAYOUT_RANGE_SIZE = (VK_IMAGE_LAYOUT_PREINITIALIZED - VK_IMAGE_LAYOUT_UNDEFINED + 1), + VK_IMAGE_LAYOUT_MAX_ENUM = 0x7FFFFFFF +} VkImageLayout; + +typedef enum VkImageViewType { + VK_IMAGE_VIEW_TYPE_1D = 0, + VK_IMAGE_VIEW_TYPE_2D = 1, + VK_IMAGE_VIEW_TYPE_3D = 2, + VK_IMAGE_VIEW_TYPE_CUBE = 3, + VK_IMAGE_VIEW_TYPE_1D_ARRAY = 4, + VK_IMAGE_VIEW_TYPE_2D_ARRAY = 5, + VK_IMAGE_VIEW_TYPE_CUBE_ARRAY = 6, + VK_IMAGE_VIEW_TYPE_BEGIN_RANGE = VK_IMAGE_VIEW_TYPE_1D, + VK_IMAGE_VIEW_TYPE_END_RANGE = VK_IMAGE_VIEW_TYPE_CUBE_ARRAY, + VK_IMAGE_VIEW_TYPE_RANGE_SIZE = (VK_IMAGE_VIEW_TYPE_CUBE_ARRAY - VK_IMAGE_VIEW_TYPE_1D + 1), + VK_IMAGE_VIEW_TYPE_MAX_ENUM = 0x7FFFFFFF +} VkImageViewType; + +typedef enum VkComponentSwizzle { + VK_COMPONENT_SWIZZLE_IDENTITY = 0, + VK_COMPONENT_SWIZZLE_ZERO = 1, + VK_COMPONENT_SWIZZLE_ONE = 2, + VK_COMPONENT_SWIZZLE_R = 3, + VK_COMPONENT_SWIZZLE_G = 4, + VK_COMPONENT_SWIZZLE_B = 5, + VK_COMPONENT_SWIZZLE_A = 6, + VK_COMPONENT_SWIZZLE_BEGIN_RANGE = VK_COMPONENT_SWIZZLE_IDENTITY, + VK_COMPONENT_SWIZZLE_END_RANGE = VK_COMPONENT_SWIZZLE_A, + VK_COMPONENT_SWIZZLE_RANGE_SIZE = (VK_COMPONENT_SWIZZLE_A - VK_COMPONENT_SWIZZLE_IDENTITY + 1), + VK_COMPONENT_SWIZZLE_MAX_ENUM = 0x7FFFFFFF +} VkComponentSwizzle; + +typedef enum VkVertexInputRate { + VK_VERTEX_INPUT_RATE_VERTEX = 0, + VK_VERTEX_INPUT_RATE_INSTANCE = 1, + VK_VERTEX_INPUT_RATE_BEGIN_RANGE = VK_VERTEX_INPUT_RATE_VERTEX, + VK_VERTEX_INPUT_RATE_END_RANGE = VK_VERTEX_INPUT_RATE_INSTANCE, + VK_VERTEX_INPUT_RATE_RANGE_SIZE = (VK_VERTEX_INPUT_RATE_INSTANCE - VK_VERTEX_INPUT_RATE_VERTEX + 1), + VK_VERTEX_INPUT_RATE_MAX_ENUM = 0x7FFFFFFF +} VkVertexInputRate; + +typedef enum VkPrimitiveTopology { + VK_PRIMITIVE_TOPOLOGY_POINT_LIST = 0, + VK_PRIMITIVE_TOPOLOGY_LINE_LIST = 1, + VK_PRIMITIVE_TOPOLOGY_LINE_STRIP = 2, + VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST = 3, + VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP = 4, + VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN = 5, + VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY = 6, + VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY = 7, + VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY = 8, + VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY = 9, + VK_PRIMITIVE_TOPOLOGY_PATCH_LIST = 10, + VK_PRIMITIVE_TOPOLOGY_BEGIN_RANGE = VK_PRIMITIVE_TOPOLOGY_POINT_LIST, + VK_PRIMITIVE_TOPOLOGY_END_RANGE = VK_PRIMITIVE_TOPOLOGY_PATCH_LIST, + VK_PRIMITIVE_TOPOLOGY_RANGE_SIZE = 
(VK_PRIMITIVE_TOPOLOGY_PATCH_LIST - VK_PRIMITIVE_TOPOLOGY_POINT_LIST + 1), + VK_PRIMITIVE_TOPOLOGY_MAX_ENUM = 0x7FFFFFFF +} VkPrimitiveTopology; + +typedef enum VkPolygonMode { + VK_POLYGON_MODE_FILL = 0, + VK_POLYGON_MODE_LINE = 1, + VK_POLYGON_MODE_POINT = 2, + VK_POLYGON_MODE_FILL_RECTANGLE_NV = 1000153000, + VK_POLYGON_MODE_BEGIN_RANGE = VK_POLYGON_MODE_FILL, + VK_POLYGON_MODE_END_RANGE = VK_POLYGON_MODE_POINT, + VK_POLYGON_MODE_RANGE_SIZE = (VK_POLYGON_MODE_POINT - VK_POLYGON_MODE_FILL + 1), + VK_POLYGON_MODE_MAX_ENUM = 0x7FFFFFFF +} VkPolygonMode; + +typedef enum VkFrontFace { + VK_FRONT_FACE_COUNTER_CLOCKWISE = 0, + VK_FRONT_FACE_CLOCKWISE = 1, + VK_FRONT_FACE_BEGIN_RANGE = VK_FRONT_FACE_COUNTER_CLOCKWISE, + VK_FRONT_FACE_END_RANGE = VK_FRONT_FACE_CLOCKWISE, + VK_FRONT_FACE_RANGE_SIZE = (VK_FRONT_FACE_CLOCKWISE - VK_FRONT_FACE_COUNTER_CLOCKWISE + 1), + VK_FRONT_FACE_MAX_ENUM = 0x7FFFFFFF +} VkFrontFace; + +typedef enum VkCompareOp { + VK_COMPARE_OP_NEVER = 0, + VK_COMPARE_OP_LESS = 1, + VK_COMPARE_OP_EQUAL = 2, + VK_COMPARE_OP_LESS_OR_EQUAL = 3, + VK_COMPARE_OP_GREATER = 4, + VK_COMPARE_OP_NOT_EQUAL = 5, + VK_COMPARE_OP_GREATER_OR_EQUAL = 6, + VK_COMPARE_OP_ALWAYS = 7, + VK_COMPARE_OP_BEGIN_RANGE = VK_COMPARE_OP_NEVER, + VK_COMPARE_OP_END_RANGE = VK_COMPARE_OP_ALWAYS, + VK_COMPARE_OP_RANGE_SIZE = (VK_COMPARE_OP_ALWAYS - VK_COMPARE_OP_NEVER + 1), + VK_COMPARE_OP_MAX_ENUM = 0x7FFFFFFF +} VkCompareOp; + +typedef enum VkStencilOp { + VK_STENCIL_OP_KEEP = 0, + VK_STENCIL_OP_ZERO = 1, + VK_STENCIL_OP_REPLACE = 2, + VK_STENCIL_OP_INCREMENT_AND_CLAMP = 3, + VK_STENCIL_OP_DECREMENT_AND_CLAMP = 4, + VK_STENCIL_OP_INVERT = 5, + VK_STENCIL_OP_INCREMENT_AND_WRAP = 6, + VK_STENCIL_OP_DECREMENT_AND_WRAP = 7, + VK_STENCIL_OP_BEGIN_RANGE = VK_STENCIL_OP_KEEP, + VK_STENCIL_OP_END_RANGE = VK_STENCIL_OP_DECREMENT_AND_WRAP, + VK_STENCIL_OP_RANGE_SIZE = (VK_STENCIL_OP_DECREMENT_AND_WRAP - VK_STENCIL_OP_KEEP + 1), + VK_STENCIL_OP_MAX_ENUM = 0x7FFFFFFF +} VkStencilOp; + +typedef enum VkLogicOp { + VK_LOGIC_OP_CLEAR = 0, + VK_LOGIC_OP_AND = 1, + VK_LOGIC_OP_AND_REVERSE = 2, + VK_LOGIC_OP_COPY = 3, + VK_LOGIC_OP_AND_INVERTED = 4, + VK_LOGIC_OP_NO_OP = 5, + VK_LOGIC_OP_XOR = 6, + VK_LOGIC_OP_OR = 7, + VK_LOGIC_OP_NOR = 8, + VK_LOGIC_OP_EQUIVALENT = 9, + VK_LOGIC_OP_INVERT = 10, + VK_LOGIC_OP_OR_REVERSE = 11, + VK_LOGIC_OP_COPY_INVERTED = 12, + VK_LOGIC_OP_OR_INVERTED = 13, + VK_LOGIC_OP_NAND = 14, + VK_LOGIC_OP_SET = 15, + VK_LOGIC_OP_BEGIN_RANGE = VK_LOGIC_OP_CLEAR, + VK_LOGIC_OP_END_RANGE = VK_LOGIC_OP_SET, + VK_LOGIC_OP_RANGE_SIZE = (VK_LOGIC_OP_SET - VK_LOGIC_OP_CLEAR + 1), + VK_LOGIC_OP_MAX_ENUM = 0x7FFFFFFF +} VkLogicOp; + +typedef enum VkBlendFactor { + VK_BLEND_FACTOR_ZERO = 0, + VK_BLEND_FACTOR_ONE = 1, + VK_BLEND_FACTOR_SRC_COLOR = 2, + VK_BLEND_FACTOR_ONE_MINUS_SRC_COLOR = 3, + VK_BLEND_FACTOR_DST_COLOR = 4, + VK_BLEND_FACTOR_ONE_MINUS_DST_COLOR = 5, + VK_BLEND_FACTOR_SRC_ALPHA = 6, + VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA = 7, + VK_BLEND_FACTOR_DST_ALPHA = 8, + VK_BLEND_FACTOR_ONE_MINUS_DST_ALPHA = 9, + VK_BLEND_FACTOR_CONSTANT_COLOR = 10, + VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_COLOR = 11, + VK_BLEND_FACTOR_CONSTANT_ALPHA = 12, + VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA = 13, + VK_BLEND_FACTOR_SRC_ALPHA_SATURATE = 14, + VK_BLEND_FACTOR_SRC1_COLOR = 15, + VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR = 16, + VK_BLEND_FACTOR_SRC1_ALPHA = 17, + VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA = 18, + VK_BLEND_FACTOR_BEGIN_RANGE = VK_BLEND_FACTOR_ZERO, + VK_BLEND_FACTOR_END_RANGE = VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA, + 
VK_BLEND_FACTOR_RANGE_SIZE = (VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA - VK_BLEND_FACTOR_ZERO + 1), + VK_BLEND_FACTOR_MAX_ENUM = 0x7FFFFFFF +} VkBlendFactor; + +typedef enum VkBlendOp { + VK_BLEND_OP_ADD = 0, + VK_BLEND_OP_SUBTRACT = 1, + VK_BLEND_OP_REVERSE_SUBTRACT = 2, + VK_BLEND_OP_MIN = 3, + VK_BLEND_OP_MAX = 4, + VK_BLEND_OP_ZERO_EXT = 1000148000, + VK_BLEND_OP_SRC_EXT = 1000148001, + VK_BLEND_OP_DST_EXT = 1000148002, + VK_BLEND_OP_SRC_OVER_EXT = 1000148003, + VK_BLEND_OP_DST_OVER_EXT = 1000148004, + VK_BLEND_OP_SRC_IN_EXT = 1000148005, + VK_BLEND_OP_DST_IN_EXT = 1000148006, + VK_BLEND_OP_SRC_OUT_EXT = 1000148007, + VK_BLEND_OP_DST_OUT_EXT = 1000148008, + VK_BLEND_OP_SRC_ATOP_EXT = 1000148009, + VK_BLEND_OP_DST_ATOP_EXT = 1000148010, + VK_BLEND_OP_XOR_EXT = 1000148011, + VK_BLEND_OP_MULTIPLY_EXT = 1000148012, + VK_BLEND_OP_SCREEN_EXT = 1000148013, + VK_BLEND_OP_OVERLAY_EXT = 1000148014, + VK_BLEND_OP_DARKEN_EXT = 1000148015, + VK_BLEND_OP_LIGHTEN_EXT = 1000148016, + VK_BLEND_OP_COLORDODGE_EXT = 1000148017, + VK_BLEND_OP_COLORBURN_EXT = 1000148018, + VK_BLEND_OP_HARDLIGHT_EXT = 1000148019, + VK_BLEND_OP_SOFTLIGHT_EXT = 1000148020, + VK_BLEND_OP_DIFFERENCE_EXT = 1000148021, + VK_BLEND_OP_EXCLUSION_EXT = 1000148022, + VK_BLEND_OP_INVERT_EXT = 1000148023, + VK_BLEND_OP_INVERT_RGB_EXT = 1000148024, + VK_BLEND_OP_LINEARDODGE_EXT = 1000148025, + VK_BLEND_OP_LINEARBURN_EXT = 1000148026, + VK_BLEND_OP_VIVIDLIGHT_EXT = 1000148027, + VK_BLEND_OP_LINEARLIGHT_EXT = 1000148028, + VK_BLEND_OP_PINLIGHT_EXT = 1000148029, + VK_BLEND_OP_HARDMIX_EXT = 1000148030, + VK_BLEND_OP_HSL_HUE_EXT = 1000148031, + VK_BLEND_OP_HSL_SATURATION_EXT = 1000148032, + VK_BLEND_OP_HSL_COLOR_EXT = 1000148033, + VK_BLEND_OP_HSL_LUMINOSITY_EXT = 1000148034, + VK_BLEND_OP_PLUS_EXT = 1000148035, + VK_BLEND_OP_PLUS_CLAMPED_EXT = 1000148036, + VK_BLEND_OP_PLUS_CLAMPED_ALPHA_EXT = 1000148037, + VK_BLEND_OP_PLUS_DARKER_EXT = 1000148038, + VK_BLEND_OP_MINUS_EXT = 1000148039, + VK_BLEND_OP_MINUS_CLAMPED_EXT = 1000148040, + VK_BLEND_OP_CONTRAST_EXT = 1000148041, + VK_BLEND_OP_INVERT_OVG_EXT = 1000148042, + VK_BLEND_OP_RED_EXT = 1000148043, + VK_BLEND_OP_GREEN_EXT = 1000148044, + VK_BLEND_OP_BLUE_EXT = 1000148045, + VK_BLEND_OP_BEGIN_RANGE = VK_BLEND_OP_ADD, + VK_BLEND_OP_END_RANGE = VK_BLEND_OP_MAX, + VK_BLEND_OP_RANGE_SIZE = (VK_BLEND_OP_MAX - VK_BLEND_OP_ADD + 1), + VK_BLEND_OP_MAX_ENUM = 0x7FFFFFFF +} VkBlendOp; + +typedef enum VkDynamicState { + VK_DYNAMIC_STATE_VIEWPORT = 0, + VK_DYNAMIC_STATE_SCISSOR = 1, + VK_DYNAMIC_STATE_LINE_WIDTH = 2, + VK_DYNAMIC_STATE_DEPTH_BIAS = 3, + VK_DYNAMIC_STATE_BLEND_CONSTANTS = 4, + VK_DYNAMIC_STATE_DEPTH_BOUNDS = 5, + VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK = 6, + VK_DYNAMIC_STATE_STENCIL_WRITE_MASK = 7, + VK_DYNAMIC_STATE_STENCIL_REFERENCE = 8, + VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV = 1000087000, + VK_DYNAMIC_STATE_DISCARD_RECTANGLE_EXT = 1000099000, + VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT = 1000143000, + VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV = 1000164004, + VK_DYNAMIC_STATE_VIEWPORT_COARSE_SAMPLE_ORDER_NV = 1000164006, + VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV = 1000205001, + VK_DYNAMIC_STATE_BEGIN_RANGE = VK_DYNAMIC_STATE_VIEWPORT, + VK_DYNAMIC_STATE_END_RANGE = VK_DYNAMIC_STATE_STENCIL_REFERENCE, + VK_DYNAMIC_STATE_RANGE_SIZE = (VK_DYNAMIC_STATE_STENCIL_REFERENCE - VK_DYNAMIC_STATE_VIEWPORT + 1), + VK_DYNAMIC_STATE_MAX_ENUM = 0x7FFFFFFF +} VkDynamicState; + +typedef enum VkFilter { + VK_FILTER_NEAREST = 0, + VK_FILTER_LINEAR = 1, + VK_FILTER_CUBIC_IMG = 1000015000, + 
VK_FILTER_BEGIN_RANGE = VK_FILTER_NEAREST, + VK_FILTER_END_RANGE = VK_FILTER_LINEAR, + VK_FILTER_RANGE_SIZE = (VK_FILTER_LINEAR - VK_FILTER_NEAREST + 1), + VK_FILTER_MAX_ENUM = 0x7FFFFFFF +} VkFilter; + +typedef enum VkSamplerMipmapMode { + VK_SAMPLER_MIPMAP_MODE_NEAREST = 0, + VK_SAMPLER_MIPMAP_MODE_LINEAR = 1, + VK_SAMPLER_MIPMAP_MODE_BEGIN_RANGE = VK_SAMPLER_MIPMAP_MODE_NEAREST, + VK_SAMPLER_MIPMAP_MODE_END_RANGE = VK_SAMPLER_MIPMAP_MODE_LINEAR, + VK_SAMPLER_MIPMAP_MODE_RANGE_SIZE = (VK_SAMPLER_MIPMAP_MODE_LINEAR - VK_SAMPLER_MIPMAP_MODE_NEAREST + 1), + VK_SAMPLER_MIPMAP_MODE_MAX_ENUM = 0x7FFFFFFF +} VkSamplerMipmapMode; + +typedef enum VkSamplerAddressMode { + VK_SAMPLER_ADDRESS_MODE_REPEAT = 0, + VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT = 1, + VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE = 2, + VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER = 3, + VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE = 4, + VK_SAMPLER_ADDRESS_MODE_BEGIN_RANGE = VK_SAMPLER_ADDRESS_MODE_REPEAT, + VK_SAMPLER_ADDRESS_MODE_END_RANGE = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER, + VK_SAMPLER_ADDRESS_MODE_RANGE_SIZE = (VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER - VK_SAMPLER_ADDRESS_MODE_REPEAT + 1), + VK_SAMPLER_ADDRESS_MODE_MAX_ENUM = 0x7FFFFFFF +} VkSamplerAddressMode; + +typedef enum VkBorderColor { + VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK = 0, + VK_BORDER_COLOR_INT_TRANSPARENT_BLACK = 1, + VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK = 2, + VK_BORDER_COLOR_INT_OPAQUE_BLACK = 3, + VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE = 4, + VK_BORDER_COLOR_INT_OPAQUE_WHITE = 5, + VK_BORDER_COLOR_BEGIN_RANGE = VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK, + VK_BORDER_COLOR_END_RANGE = VK_BORDER_COLOR_INT_OPAQUE_WHITE, + VK_BORDER_COLOR_RANGE_SIZE = (VK_BORDER_COLOR_INT_OPAQUE_WHITE - VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK + 1), + VK_BORDER_COLOR_MAX_ENUM = 0x7FFFFFFF +} VkBorderColor; + +typedef enum VkDescriptorType { + VK_DESCRIPTOR_TYPE_SAMPLER = 0, + VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER = 1, + VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE = 2, + VK_DESCRIPTOR_TYPE_STORAGE_IMAGE = 3, + VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER = 4, + VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER = 5, + VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER = 6, + VK_DESCRIPTOR_TYPE_STORAGE_BUFFER = 7, + VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC = 8, + VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC = 9, + VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT = 10, + VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT = 1000138000, + VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV = 1000165000, + VK_DESCRIPTOR_TYPE_BEGIN_RANGE = VK_DESCRIPTOR_TYPE_SAMPLER, + VK_DESCRIPTOR_TYPE_END_RANGE = VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, + VK_DESCRIPTOR_TYPE_RANGE_SIZE = (VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT - VK_DESCRIPTOR_TYPE_SAMPLER + 1), + VK_DESCRIPTOR_TYPE_MAX_ENUM = 0x7FFFFFFF +} VkDescriptorType; + +typedef enum VkAttachmentLoadOp { + VK_ATTACHMENT_LOAD_OP_LOAD = 0, + VK_ATTACHMENT_LOAD_OP_CLEAR = 1, + VK_ATTACHMENT_LOAD_OP_DONT_CARE = 2, + VK_ATTACHMENT_LOAD_OP_BEGIN_RANGE = VK_ATTACHMENT_LOAD_OP_LOAD, + VK_ATTACHMENT_LOAD_OP_END_RANGE = VK_ATTACHMENT_LOAD_OP_DONT_CARE, + VK_ATTACHMENT_LOAD_OP_RANGE_SIZE = (VK_ATTACHMENT_LOAD_OP_DONT_CARE - VK_ATTACHMENT_LOAD_OP_LOAD + 1), + VK_ATTACHMENT_LOAD_OP_MAX_ENUM = 0x7FFFFFFF +} VkAttachmentLoadOp; + +typedef enum VkAttachmentStoreOp { + VK_ATTACHMENT_STORE_OP_STORE = 0, + VK_ATTACHMENT_STORE_OP_DONT_CARE = 1, + VK_ATTACHMENT_STORE_OP_BEGIN_RANGE = VK_ATTACHMENT_STORE_OP_STORE, + VK_ATTACHMENT_STORE_OP_END_RANGE = VK_ATTACHMENT_STORE_OP_DONT_CARE, + VK_ATTACHMENT_STORE_OP_RANGE_SIZE = 
(VK_ATTACHMENT_STORE_OP_DONT_CARE - VK_ATTACHMENT_STORE_OP_STORE + 1), + VK_ATTACHMENT_STORE_OP_MAX_ENUM = 0x7FFFFFFF +} VkAttachmentStoreOp; + +typedef enum VkPipelineBindPoint { + VK_PIPELINE_BIND_POINT_GRAPHICS = 0, + VK_PIPELINE_BIND_POINT_COMPUTE = 1, + VK_PIPELINE_BIND_POINT_RAY_TRACING_NV = 1000165000, + VK_PIPELINE_BIND_POINT_BEGIN_RANGE = VK_PIPELINE_BIND_POINT_GRAPHICS, + VK_PIPELINE_BIND_POINT_END_RANGE = VK_PIPELINE_BIND_POINT_COMPUTE, + VK_PIPELINE_BIND_POINT_RANGE_SIZE = (VK_PIPELINE_BIND_POINT_COMPUTE - VK_PIPELINE_BIND_POINT_GRAPHICS + 1), + VK_PIPELINE_BIND_POINT_MAX_ENUM = 0x7FFFFFFF +} VkPipelineBindPoint; + +typedef enum VkCommandBufferLevel { + VK_COMMAND_BUFFER_LEVEL_PRIMARY = 0, + VK_COMMAND_BUFFER_LEVEL_SECONDARY = 1, + VK_COMMAND_BUFFER_LEVEL_BEGIN_RANGE = VK_COMMAND_BUFFER_LEVEL_PRIMARY, + VK_COMMAND_BUFFER_LEVEL_END_RANGE = VK_COMMAND_BUFFER_LEVEL_SECONDARY, + VK_COMMAND_BUFFER_LEVEL_RANGE_SIZE = (VK_COMMAND_BUFFER_LEVEL_SECONDARY - VK_COMMAND_BUFFER_LEVEL_PRIMARY + 1), + VK_COMMAND_BUFFER_LEVEL_MAX_ENUM = 0x7FFFFFFF +} VkCommandBufferLevel; + +typedef enum VkIndexType { + VK_INDEX_TYPE_UINT16 = 0, + VK_INDEX_TYPE_UINT32 = 1, + VK_INDEX_TYPE_NONE_NV = 1000165000, + VK_INDEX_TYPE_BEGIN_RANGE = VK_INDEX_TYPE_UINT16, + VK_INDEX_TYPE_END_RANGE = VK_INDEX_TYPE_UINT32, + VK_INDEX_TYPE_RANGE_SIZE = (VK_INDEX_TYPE_UINT32 - VK_INDEX_TYPE_UINT16 + 1), + VK_INDEX_TYPE_MAX_ENUM = 0x7FFFFFFF +} VkIndexType; + +typedef enum VkSubpassContents { + VK_SUBPASS_CONTENTS_INLINE = 0, + VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS = 1, + VK_SUBPASS_CONTENTS_BEGIN_RANGE = VK_SUBPASS_CONTENTS_INLINE, + VK_SUBPASS_CONTENTS_END_RANGE = VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS, + VK_SUBPASS_CONTENTS_RANGE_SIZE = (VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS - VK_SUBPASS_CONTENTS_INLINE + 1), + VK_SUBPASS_CONTENTS_MAX_ENUM = 0x7FFFFFFF +} VkSubpassContents; + +typedef enum VkObjectType { + VK_OBJECT_TYPE_UNKNOWN = 0, + VK_OBJECT_TYPE_INSTANCE = 1, + VK_OBJECT_TYPE_PHYSICAL_DEVICE = 2, + VK_OBJECT_TYPE_DEVICE = 3, + VK_OBJECT_TYPE_QUEUE = 4, + VK_OBJECT_TYPE_SEMAPHORE = 5, + VK_OBJECT_TYPE_COMMAND_BUFFER = 6, + VK_OBJECT_TYPE_FENCE = 7, + VK_OBJECT_TYPE_DEVICE_MEMORY = 8, + VK_OBJECT_TYPE_BUFFER = 9, + VK_OBJECT_TYPE_IMAGE = 10, + VK_OBJECT_TYPE_EVENT = 11, + VK_OBJECT_TYPE_QUERY_POOL = 12, + VK_OBJECT_TYPE_BUFFER_VIEW = 13, + VK_OBJECT_TYPE_IMAGE_VIEW = 14, + VK_OBJECT_TYPE_SHADER_MODULE = 15, + VK_OBJECT_TYPE_PIPELINE_CACHE = 16, + VK_OBJECT_TYPE_PIPELINE_LAYOUT = 17, + VK_OBJECT_TYPE_RENDER_PASS = 18, + VK_OBJECT_TYPE_PIPELINE = 19, + VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT = 20, + VK_OBJECT_TYPE_SAMPLER = 21, + VK_OBJECT_TYPE_DESCRIPTOR_POOL = 22, + VK_OBJECT_TYPE_DESCRIPTOR_SET = 23, + VK_OBJECT_TYPE_FRAMEBUFFER = 24, + VK_OBJECT_TYPE_COMMAND_POOL = 25, + VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION = 1000156000, + VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE = 1000085000, + VK_OBJECT_TYPE_SURFACE_KHR = 1000000000, + VK_OBJECT_TYPE_SWAPCHAIN_KHR = 1000001000, + VK_OBJECT_TYPE_DISPLAY_KHR = 1000002000, + VK_OBJECT_TYPE_DISPLAY_MODE_KHR = 1000002001, + VK_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT = 1000011000, + VK_OBJECT_TYPE_OBJECT_TABLE_NVX = 1000086000, + VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX = 1000086001, + VK_OBJECT_TYPE_DEBUG_UTILS_MESSENGER_EXT = 1000128000, + VK_OBJECT_TYPE_VALIDATION_CACHE_EXT = 1000160000, + VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV = 1000165000, + VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR = VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE, + 
VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR = VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION, + VK_OBJECT_TYPE_BEGIN_RANGE = VK_OBJECT_TYPE_UNKNOWN, + VK_OBJECT_TYPE_END_RANGE = VK_OBJECT_TYPE_COMMAND_POOL, + VK_OBJECT_TYPE_RANGE_SIZE = (VK_OBJECT_TYPE_COMMAND_POOL - VK_OBJECT_TYPE_UNKNOWN + 1), + VK_OBJECT_TYPE_MAX_ENUM = 0x7FFFFFFF +} VkObjectType; + +typedef enum VkVendorId { + VK_VENDOR_ID_VIV = 0x10001, + VK_VENDOR_ID_VSI = 0x10002, + VK_VENDOR_ID_KAZAN = 0x10003, + VK_VENDOR_ID_BEGIN_RANGE = VK_VENDOR_ID_VIV, + VK_VENDOR_ID_END_RANGE = VK_VENDOR_ID_KAZAN, + VK_VENDOR_ID_RANGE_SIZE = (VK_VENDOR_ID_KAZAN - VK_VENDOR_ID_VIV + 1), + VK_VENDOR_ID_MAX_ENUM = 0x7FFFFFFF +} VkVendorId; + +typedef VkFlags VkInstanceCreateFlags; + +typedef enum VkFormatFeatureFlagBits { + VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT = 0x00000001, + VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT = 0x00000002, + VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT = 0x00000004, + VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT = 0x00000008, + VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT = 0x00000010, + VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT = 0x00000020, + VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT = 0x00000040, + VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT = 0x00000080, + VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT = 0x00000100, + VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT = 0x00000200, + VK_FORMAT_FEATURE_BLIT_SRC_BIT = 0x00000400, + VK_FORMAT_FEATURE_BLIT_DST_BIT = 0x00000800, + VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT = 0x00001000, + VK_FORMAT_FEATURE_TRANSFER_SRC_BIT = 0x00004000, + VK_FORMAT_FEATURE_TRANSFER_DST_BIT = 0x00008000, + VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT = 0x00020000, + VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT = 0x00040000, + VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT = 0x00080000, + VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT = 0x00100000, + VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT = 0x00200000, + VK_FORMAT_FEATURE_DISJOINT_BIT = 0x00400000, + VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT = 0x00800000, + VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG = 0x00002000, + VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT_EXT = 0x00010000, + VK_FORMAT_FEATURE_TRANSFER_SRC_BIT_KHR = VK_FORMAT_FEATURE_TRANSFER_SRC_BIT, + VK_FORMAT_FEATURE_TRANSFER_DST_BIT_KHR = VK_FORMAT_FEATURE_TRANSFER_DST_BIT, + VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT_KHR = VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT, + VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT_KHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT, + VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT_KHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT, + VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT_KHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT, + VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT_KHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT, + VK_FORMAT_FEATURE_DISJOINT_BIT_KHR = VK_FORMAT_FEATURE_DISJOINT_BIT, + VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT_KHR = VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT, + VK_FORMAT_FEATURE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkFormatFeatureFlagBits; +typedef VkFlags VkFormatFeatureFlags; 
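[Editor's note: the sketch below is illustrative only and is not part of the vendored header or of this diff. It shows how an application would typically consume the VkFormatFeatureFlags bits defined just above, assuming a VkPhysicalDevice handle obtained through the usual instance enumeration; the helper name supports_linear_sampling is hypothetical. vkGetPhysicalDeviceFormatProperties and the VkFormatProperties fields used here are standard core Vulkan 1.0 API.]

    /* Minimal usage sketch for VkFormatFeatureFlags (editor's illustration,
     * not vendored code). Assumes a valid VkPhysicalDevice `gpu`. */
    #include <vulkan/vulkan.h>
    #include <stdbool.h>

    /* Returns true if `format` can be sampled with linear filtering from an
     * optimally tiled image on `gpu`. */
    static bool supports_linear_sampling(VkPhysicalDevice gpu, VkFormat format)
    {
        const VkFormatFeatureFlags wanted =
            VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT |
            VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT;

        VkFormatProperties props;
        vkGetPhysicalDeviceFormatProperties(gpu, format, &props);

        /* optimalTilingFeatures is a VkFormatFeatureFlags bitmask built from
         * the VkFormatFeatureFlagBits values defined above. */
        return (props.optimalTilingFeatures & wanted) == wanted;
    }

[The same mask-and-test pattern applies to the other Vk*FlagBits / VkFlags pairs declared throughout this header.]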
+ +typedef enum VkImageUsageFlagBits { + VK_IMAGE_USAGE_TRANSFER_SRC_BIT = 0x00000001, + VK_IMAGE_USAGE_TRANSFER_DST_BIT = 0x00000002, + VK_IMAGE_USAGE_SAMPLED_BIT = 0x00000004, + VK_IMAGE_USAGE_STORAGE_BIT = 0x00000008, + VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT = 0x00000010, + VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT = 0x00000020, + VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT = 0x00000040, + VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT = 0x00000080, + VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV = 0x00000100, + VK_IMAGE_USAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkImageUsageFlagBits; +typedef VkFlags VkImageUsageFlags; + +typedef enum VkImageCreateFlagBits { + VK_IMAGE_CREATE_SPARSE_BINDING_BIT = 0x00000001, + VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT = 0x00000002, + VK_IMAGE_CREATE_SPARSE_ALIASED_BIT = 0x00000004, + VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT = 0x00000008, + VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT = 0x00000010, + VK_IMAGE_CREATE_ALIAS_BIT = 0x00000400, + VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT = 0x00000040, + VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT = 0x00000020, + VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT = 0x00000080, + VK_IMAGE_CREATE_EXTENDED_USAGE_BIT = 0x00000100, + VK_IMAGE_CREATE_PROTECTED_BIT = 0x00000800, + VK_IMAGE_CREATE_DISJOINT_BIT = 0x00000200, + VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV = 0x00002000, + VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT = 0x00001000, + VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR = VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT, + VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR = VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT, + VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT_KHR = VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT, + VK_IMAGE_CREATE_EXTENDED_USAGE_BIT_KHR = VK_IMAGE_CREATE_EXTENDED_USAGE_BIT, + VK_IMAGE_CREATE_DISJOINT_BIT_KHR = VK_IMAGE_CREATE_DISJOINT_BIT, + VK_IMAGE_CREATE_ALIAS_BIT_KHR = VK_IMAGE_CREATE_ALIAS_BIT, + VK_IMAGE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkImageCreateFlagBits; +typedef VkFlags VkImageCreateFlags; + +typedef enum VkSampleCountFlagBits { + VK_SAMPLE_COUNT_1_BIT = 0x00000001, + VK_SAMPLE_COUNT_2_BIT = 0x00000002, + VK_SAMPLE_COUNT_4_BIT = 0x00000004, + VK_SAMPLE_COUNT_8_BIT = 0x00000008, + VK_SAMPLE_COUNT_16_BIT = 0x00000010, + VK_SAMPLE_COUNT_32_BIT = 0x00000020, + VK_SAMPLE_COUNT_64_BIT = 0x00000040, + VK_SAMPLE_COUNT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkSampleCountFlagBits; +typedef VkFlags VkSampleCountFlags; + +typedef enum VkQueueFlagBits { + VK_QUEUE_GRAPHICS_BIT = 0x00000001, + VK_QUEUE_COMPUTE_BIT = 0x00000002, + VK_QUEUE_TRANSFER_BIT = 0x00000004, + VK_QUEUE_SPARSE_BINDING_BIT = 0x00000008, + VK_QUEUE_PROTECTED_BIT = 0x00000010, + VK_QUEUE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkQueueFlagBits; +typedef VkFlags VkQueueFlags; + +typedef enum VkMemoryPropertyFlagBits { + VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT = 0x00000001, + VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT = 0x00000002, + VK_MEMORY_PROPERTY_HOST_COHERENT_BIT = 0x00000004, + VK_MEMORY_PROPERTY_HOST_CACHED_BIT = 0x00000008, + VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT = 0x00000010, + VK_MEMORY_PROPERTY_PROTECTED_BIT = 0x00000020, + VK_MEMORY_PROPERTY_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkMemoryPropertyFlagBits; +typedef VkFlags VkMemoryPropertyFlags; + +typedef enum VkMemoryHeapFlagBits { + VK_MEMORY_HEAP_DEVICE_LOCAL_BIT = 0x00000001, + VK_MEMORY_HEAP_MULTI_INSTANCE_BIT = 0x00000002, + VK_MEMORY_HEAP_MULTI_INSTANCE_BIT_KHR = VK_MEMORY_HEAP_MULTI_INSTANCE_BIT, + VK_MEMORY_HEAP_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkMemoryHeapFlagBits; +typedef VkFlags 
VkMemoryHeapFlags; +typedef VkFlags VkDeviceCreateFlags; + +typedef enum VkDeviceQueueCreateFlagBits { + VK_DEVICE_QUEUE_CREATE_PROTECTED_BIT = 0x00000001, + VK_DEVICE_QUEUE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkDeviceQueueCreateFlagBits; +typedef VkFlags VkDeviceQueueCreateFlags; + +typedef enum VkPipelineStageFlagBits { + VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT = 0x00000001, + VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT = 0x00000002, + VK_PIPELINE_STAGE_VERTEX_INPUT_BIT = 0x00000004, + VK_PIPELINE_STAGE_VERTEX_SHADER_BIT = 0x00000008, + VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT = 0x00000010, + VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT = 0x00000020, + VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT = 0x00000040, + VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT = 0x00000080, + VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT = 0x00000100, + VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT = 0x00000200, + VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT = 0x00000400, + VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT = 0x00000800, + VK_PIPELINE_STAGE_TRANSFER_BIT = 0x00001000, + VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT = 0x00002000, + VK_PIPELINE_STAGE_HOST_BIT = 0x00004000, + VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT = 0x00008000, + VK_PIPELINE_STAGE_ALL_COMMANDS_BIT = 0x00010000, + VK_PIPELINE_STAGE_TRANSFORM_FEEDBACK_BIT_EXT = 0x01000000, + VK_PIPELINE_STAGE_CONDITIONAL_RENDERING_BIT_EXT = 0x00040000, + VK_PIPELINE_STAGE_COMMAND_PROCESS_BIT_NVX = 0x00020000, + VK_PIPELINE_STAGE_SHADING_RATE_IMAGE_BIT_NV = 0x00400000, + VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_NV = 0x00200000, + VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_NV = 0x02000000, + VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV = 0x00080000, + VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV = 0x00100000, + VK_PIPELINE_STAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkPipelineStageFlagBits; +typedef VkFlags VkPipelineStageFlags; +typedef VkFlags VkMemoryMapFlags; + +typedef enum VkImageAspectFlagBits { + VK_IMAGE_ASPECT_COLOR_BIT = 0x00000001, + VK_IMAGE_ASPECT_DEPTH_BIT = 0x00000002, + VK_IMAGE_ASPECT_STENCIL_BIT = 0x00000004, + VK_IMAGE_ASPECT_METADATA_BIT = 0x00000008, + VK_IMAGE_ASPECT_PLANE_0_BIT = 0x00000010, + VK_IMAGE_ASPECT_PLANE_1_BIT = 0x00000020, + VK_IMAGE_ASPECT_PLANE_2_BIT = 0x00000040, + VK_IMAGE_ASPECT_MEMORY_PLANE_0_BIT_EXT = 0x00000080, + VK_IMAGE_ASPECT_MEMORY_PLANE_1_BIT_EXT = 0x00000100, + VK_IMAGE_ASPECT_MEMORY_PLANE_2_BIT_EXT = 0x00000200, + VK_IMAGE_ASPECT_MEMORY_PLANE_3_BIT_EXT = 0x00000400, + VK_IMAGE_ASPECT_PLANE_0_BIT_KHR = VK_IMAGE_ASPECT_PLANE_0_BIT, + VK_IMAGE_ASPECT_PLANE_1_BIT_KHR = VK_IMAGE_ASPECT_PLANE_1_BIT, + VK_IMAGE_ASPECT_PLANE_2_BIT_KHR = VK_IMAGE_ASPECT_PLANE_2_BIT, + VK_IMAGE_ASPECT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkImageAspectFlagBits; +typedef VkFlags VkImageAspectFlags; + +typedef enum VkSparseImageFormatFlagBits { + VK_SPARSE_IMAGE_FORMAT_SINGLE_MIPTAIL_BIT = 0x00000001, + VK_SPARSE_IMAGE_FORMAT_ALIGNED_MIP_SIZE_BIT = 0x00000002, + VK_SPARSE_IMAGE_FORMAT_NONSTANDARD_BLOCK_SIZE_BIT = 0x00000004, + VK_SPARSE_IMAGE_FORMAT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkSparseImageFormatFlagBits; +typedef VkFlags VkSparseImageFormatFlags; + +typedef enum VkSparseMemoryBindFlagBits { + VK_SPARSE_MEMORY_BIND_METADATA_BIT = 0x00000001, + VK_SPARSE_MEMORY_BIND_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkSparseMemoryBindFlagBits; +typedef VkFlags VkSparseMemoryBindFlags; + +typedef enum VkFenceCreateFlagBits { + VK_FENCE_CREATE_SIGNALED_BIT = 0x00000001, + VK_FENCE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkFenceCreateFlagBits; +typedef VkFlags VkFenceCreateFlags; +typedef 
VkFlags VkSemaphoreCreateFlags; +typedef VkFlags VkEventCreateFlags; +typedef VkFlags VkQueryPoolCreateFlags; + +typedef enum VkQueryPipelineStatisticFlagBits { + VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_VERTICES_BIT = 0x00000001, + VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_PRIMITIVES_BIT = 0x00000002, + VK_QUERY_PIPELINE_STATISTIC_VERTEX_SHADER_INVOCATIONS_BIT = 0x00000004, + VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_INVOCATIONS_BIT = 0x00000008, + VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_PRIMITIVES_BIT = 0x00000010, + VK_QUERY_PIPELINE_STATISTIC_CLIPPING_INVOCATIONS_BIT = 0x00000020, + VK_QUERY_PIPELINE_STATISTIC_CLIPPING_PRIMITIVES_BIT = 0x00000040, + VK_QUERY_PIPELINE_STATISTIC_FRAGMENT_SHADER_INVOCATIONS_BIT = 0x00000080, + VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_CONTROL_SHADER_PATCHES_BIT = 0x00000100, + VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_EVALUATION_SHADER_INVOCATIONS_BIT = 0x00000200, + VK_QUERY_PIPELINE_STATISTIC_COMPUTE_SHADER_INVOCATIONS_BIT = 0x00000400, + VK_QUERY_PIPELINE_STATISTIC_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkQueryPipelineStatisticFlagBits; +typedef VkFlags VkQueryPipelineStatisticFlags; + +typedef enum VkQueryResultFlagBits { + VK_QUERY_RESULT_64_BIT = 0x00000001, + VK_QUERY_RESULT_WAIT_BIT = 0x00000002, + VK_QUERY_RESULT_WITH_AVAILABILITY_BIT = 0x00000004, + VK_QUERY_RESULT_PARTIAL_BIT = 0x00000008, + VK_QUERY_RESULT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkQueryResultFlagBits; +typedef VkFlags VkQueryResultFlags; + +typedef enum VkBufferCreateFlagBits { + VK_BUFFER_CREATE_SPARSE_BINDING_BIT = 0x00000001, + VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT = 0x00000002, + VK_BUFFER_CREATE_SPARSE_ALIASED_BIT = 0x00000004, + VK_BUFFER_CREATE_PROTECTED_BIT = 0x00000008, + VK_BUFFER_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkBufferCreateFlagBits; +typedef VkFlags VkBufferCreateFlags; + +typedef enum VkBufferUsageFlagBits { + VK_BUFFER_USAGE_TRANSFER_SRC_BIT = 0x00000001, + VK_BUFFER_USAGE_TRANSFER_DST_BIT = 0x00000002, + VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT = 0x00000004, + VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT = 0x00000008, + VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT = 0x00000010, + VK_BUFFER_USAGE_STORAGE_BUFFER_BIT = 0x00000020, + VK_BUFFER_USAGE_INDEX_BUFFER_BIT = 0x00000040, + VK_BUFFER_USAGE_VERTEX_BUFFER_BIT = 0x00000080, + VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT = 0x00000100, + VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_BUFFER_BIT_EXT = 0x00000800, + VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_EXT = 0x00001000, + VK_BUFFER_USAGE_CONDITIONAL_RENDERING_BIT_EXT = 0x00000200, + VK_BUFFER_USAGE_RAY_TRACING_BIT_NV = 0x00000400, + VK_BUFFER_USAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkBufferUsageFlagBits; +typedef VkFlags VkBufferUsageFlags; +typedef VkFlags VkBufferViewCreateFlags; +typedef VkFlags VkImageViewCreateFlags; +typedef VkFlags VkShaderModuleCreateFlags; +typedef VkFlags VkPipelineCacheCreateFlags; + +typedef enum VkPipelineCreateFlagBits { + VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT = 0x00000001, + VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT = 0x00000002, + VK_PIPELINE_CREATE_DERIVATIVE_BIT = 0x00000004, + VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT = 0x00000008, + VK_PIPELINE_CREATE_DISPATCH_BASE = 0x00000010, + VK_PIPELINE_CREATE_DEFER_COMPILE_BIT_NV = 0x00000020, + VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT_KHR = VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT, + VK_PIPELINE_CREATE_DISPATCH_BASE_KHR = VK_PIPELINE_CREATE_DISPATCH_BASE, + VK_PIPELINE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkPipelineCreateFlagBits; +typedef VkFlags 
VkPipelineCreateFlags; +typedef VkFlags VkPipelineShaderStageCreateFlags; + +typedef enum VkShaderStageFlagBits { + VK_SHADER_STAGE_VERTEX_BIT = 0x00000001, + VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT = 0x00000002, + VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT = 0x00000004, + VK_SHADER_STAGE_GEOMETRY_BIT = 0x00000008, + VK_SHADER_STAGE_FRAGMENT_BIT = 0x00000010, + VK_SHADER_STAGE_COMPUTE_BIT = 0x00000020, + VK_SHADER_STAGE_ALL_GRAPHICS = 0x0000001F, + VK_SHADER_STAGE_ALL = 0x7FFFFFFF, + VK_SHADER_STAGE_RAYGEN_BIT_NV = 0x00000100, + VK_SHADER_STAGE_ANY_HIT_BIT_NV = 0x00000200, + VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV = 0x00000400, + VK_SHADER_STAGE_MISS_BIT_NV = 0x00000800, + VK_SHADER_STAGE_INTERSECTION_BIT_NV = 0x00001000, + VK_SHADER_STAGE_CALLABLE_BIT_NV = 0x00002000, + VK_SHADER_STAGE_TASK_BIT_NV = 0x00000040, + VK_SHADER_STAGE_MESH_BIT_NV = 0x00000080, + VK_SHADER_STAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkShaderStageFlagBits; +typedef VkFlags VkPipelineVertexInputStateCreateFlags; +typedef VkFlags VkPipelineInputAssemblyStateCreateFlags; +typedef VkFlags VkPipelineTessellationStateCreateFlags; +typedef VkFlags VkPipelineViewportStateCreateFlags; +typedef VkFlags VkPipelineRasterizationStateCreateFlags; + +typedef enum VkCullModeFlagBits { + VK_CULL_MODE_NONE = 0, + VK_CULL_MODE_FRONT_BIT = 0x00000001, + VK_CULL_MODE_BACK_BIT = 0x00000002, + VK_CULL_MODE_FRONT_AND_BACK = 0x00000003, + VK_CULL_MODE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkCullModeFlagBits; +typedef VkFlags VkCullModeFlags; +typedef VkFlags VkPipelineMultisampleStateCreateFlags; +typedef VkFlags VkPipelineDepthStencilStateCreateFlags; +typedef VkFlags VkPipelineColorBlendStateCreateFlags; + +typedef enum VkColorComponentFlagBits { + VK_COLOR_COMPONENT_R_BIT = 0x00000001, + VK_COLOR_COMPONENT_G_BIT = 0x00000002, + VK_COLOR_COMPONENT_B_BIT = 0x00000004, + VK_COLOR_COMPONENT_A_BIT = 0x00000008, + VK_COLOR_COMPONENT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkColorComponentFlagBits; +typedef VkFlags VkColorComponentFlags; +typedef VkFlags VkPipelineDynamicStateCreateFlags; +typedef VkFlags VkPipelineLayoutCreateFlags; +typedef VkFlags VkShaderStageFlags; +typedef VkFlags VkSamplerCreateFlags; + +typedef enum VkDescriptorSetLayoutCreateFlagBits { + VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR = 0x00000001, + VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT = 0x00000002, + VK_DESCRIPTOR_SET_LAYOUT_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkDescriptorSetLayoutCreateFlagBits; +typedef VkFlags VkDescriptorSetLayoutCreateFlags; + +typedef enum VkDescriptorPoolCreateFlagBits { + VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT = 0x00000001, + VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT = 0x00000002, + VK_DESCRIPTOR_POOL_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkDescriptorPoolCreateFlagBits; +typedef VkFlags VkDescriptorPoolCreateFlags; +typedef VkFlags VkDescriptorPoolResetFlags; +typedef VkFlags VkFramebufferCreateFlags; +typedef VkFlags VkRenderPassCreateFlags; + +typedef enum VkAttachmentDescriptionFlagBits { + VK_ATTACHMENT_DESCRIPTION_MAY_ALIAS_BIT = 0x00000001, + VK_ATTACHMENT_DESCRIPTION_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkAttachmentDescriptionFlagBits; +typedef VkFlags VkAttachmentDescriptionFlags; + +typedef enum VkSubpassDescriptionFlagBits { + VK_SUBPASS_DESCRIPTION_PER_VIEW_ATTRIBUTES_BIT_NVX = 0x00000001, + VK_SUBPASS_DESCRIPTION_PER_VIEW_POSITION_X_ONLY_BIT_NVX = 0x00000002, + VK_SUBPASS_DESCRIPTION_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkSubpassDescriptionFlagBits; +typedef VkFlags 
VkSubpassDescriptionFlags; + +typedef enum VkAccessFlagBits { + VK_ACCESS_INDIRECT_COMMAND_READ_BIT = 0x00000001, + VK_ACCESS_INDEX_READ_BIT = 0x00000002, + VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT = 0x00000004, + VK_ACCESS_UNIFORM_READ_BIT = 0x00000008, + VK_ACCESS_INPUT_ATTACHMENT_READ_BIT = 0x00000010, + VK_ACCESS_SHADER_READ_BIT = 0x00000020, + VK_ACCESS_SHADER_WRITE_BIT = 0x00000040, + VK_ACCESS_COLOR_ATTACHMENT_READ_BIT = 0x00000080, + VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT = 0x00000100, + VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT = 0x00000200, + VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT = 0x00000400, + VK_ACCESS_TRANSFER_READ_BIT = 0x00000800, + VK_ACCESS_TRANSFER_WRITE_BIT = 0x00001000, + VK_ACCESS_HOST_READ_BIT = 0x00002000, + VK_ACCESS_HOST_WRITE_BIT = 0x00004000, + VK_ACCESS_MEMORY_READ_BIT = 0x00008000, + VK_ACCESS_MEMORY_WRITE_BIT = 0x00010000, + VK_ACCESS_TRANSFORM_FEEDBACK_WRITE_BIT_EXT = 0x02000000, + VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT = 0x04000000, + VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT = 0x08000000, + VK_ACCESS_CONDITIONAL_RENDERING_READ_BIT_EXT = 0x00100000, + VK_ACCESS_COMMAND_PROCESS_READ_BIT_NVX = 0x00020000, + VK_ACCESS_COMMAND_PROCESS_WRITE_BIT_NVX = 0x00040000, + VK_ACCESS_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT = 0x00080000, + VK_ACCESS_SHADING_RATE_IMAGE_READ_BIT_NV = 0x00800000, + VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_NV = 0x00200000, + VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_NV = 0x00400000, + VK_ACCESS_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkAccessFlagBits; +typedef VkFlags VkAccessFlags; + +typedef enum VkDependencyFlagBits { + VK_DEPENDENCY_BY_REGION_BIT = 0x00000001, + VK_DEPENDENCY_DEVICE_GROUP_BIT = 0x00000004, + VK_DEPENDENCY_VIEW_LOCAL_BIT = 0x00000002, + VK_DEPENDENCY_VIEW_LOCAL_BIT_KHR = VK_DEPENDENCY_VIEW_LOCAL_BIT, + VK_DEPENDENCY_DEVICE_GROUP_BIT_KHR = VK_DEPENDENCY_DEVICE_GROUP_BIT, + VK_DEPENDENCY_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkDependencyFlagBits; +typedef VkFlags VkDependencyFlags; + +typedef enum VkCommandPoolCreateFlagBits { + VK_COMMAND_POOL_CREATE_TRANSIENT_BIT = 0x00000001, + VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT = 0x00000002, + VK_COMMAND_POOL_CREATE_PROTECTED_BIT = 0x00000004, + VK_COMMAND_POOL_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkCommandPoolCreateFlagBits; +typedef VkFlags VkCommandPoolCreateFlags; + +typedef enum VkCommandPoolResetFlagBits { + VK_COMMAND_POOL_RESET_RELEASE_RESOURCES_BIT = 0x00000001, + VK_COMMAND_POOL_RESET_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkCommandPoolResetFlagBits; +typedef VkFlags VkCommandPoolResetFlags; + +typedef enum VkCommandBufferUsageFlagBits { + VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT = 0x00000001, + VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT = 0x00000002, + VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT = 0x00000004, + VK_COMMAND_BUFFER_USAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkCommandBufferUsageFlagBits; +typedef VkFlags VkCommandBufferUsageFlags; + +typedef enum VkQueryControlFlagBits { + VK_QUERY_CONTROL_PRECISE_BIT = 0x00000001, + VK_QUERY_CONTROL_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkQueryControlFlagBits; +typedef VkFlags VkQueryControlFlags; + +typedef enum VkCommandBufferResetFlagBits { + VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT = 0x00000001, + VK_COMMAND_BUFFER_RESET_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkCommandBufferResetFlagBits; +typedef VkFlags VkCommandBufferResetFlags; + +typedef enum VkStencilFaceFlagBits { + VK_STENCIL_FACE_FRONT_BIT = 0x00000001, + VK_STENCIL_FACE_BACK_BIT = 0x00000002, + VK_STENCIL_FRONT_AND_BACK = 
0x00000003, + VK_STENCIL_FACE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkStencilFaceFlagBits; +typedef VkFlags VkStencilFaceFlags; + +typedef struct VkApplicationInfo { + VkStructureType sType; + const void* pNext; + const char* pApplicationName; + uint32_t applicationVersion; + const char* pEngineName; + uint32_t engineVersion; + uint32_t apiVersion; +} VkApplicationInfo; + +typedef struct VkInstanceCreateInfo { + VkStructureType sType; + const void* pNext; + VkInstanceCreateFlags flags; + const VkApplicationInfo* pApplicationInfo; + uint32_t enabledLayerCount; + const char* const* ppEnabledLayerNames; + uint32_t enabledExtensionCount; + const char* const* ppEnabledExtensionNames; +} VkInstanceCreateInfo; + +typedef void* (VKAPI_PTR *PFN_vkAllocationFunction)( + void* pUserData, + size_t size, + size_t alignment, + VkSystemAllocationScope allocationScope); + +typedef void* (VKAPI_PTR *PFN_vkReallocationFunction)( + void* pUserData, + void* pOriginal, + size_t size, + size_t alignment, + VkSystemAllocationScope allocationScope); + +typedef void (VKAPI_PTR *PFN_vkFreeFunction)( + void* pUserData, + void* pMemory); + +typedef void (VKAPI_PTR *PFN_vkInternalAllocationNotification)( + void* pUserData, + size_t size, + VkInternalAllocationType allocationType, + VkSystemAllocationScope allocationScope); + +typedef void (VKAPI_PTR *PFN_vkInternalFreeNotification)( + void* pUserData, + size_t size, + VkInternalAllocationType allocationType, + VkSystemAllocationScope allocationScope); + +typedef struct VkAllocationCallbacks { + void* pUserData; + PFN_vkAllocationFunction pfnAllocation; + PFN_vkReallocationFunction pfnReallocation; + PFN_vkFreeFunction pfnFree; + PFN_vkInternalAllocationNotification pfnInternalAllocation; + PFN_vkInternalFreeNotification pfnInternalFree; +} VkAllocationCallbacks; + +typedef struct VkPhysicalDeviceFeatures { + VkBool32 robustBufferAccess; + VkBool32 fullDrawIndexUint32; + VkBool32 imageCubeArray; + VkBool32 independentBlend; + VkBool32 geometryShader; + VkBool32 tessellationShader; + VkBool32 sampleRateShading; + VkBool32 dualSrcBlend; + VkBool32 logicOp; + VkBool32 multiDrawIndirect; + VkBool32 drawIndirectFirstInstance; + VkBool32 depthClamp; + VkBool32 depthBiasClamp; + VkBool32 fillModeNonSolid; + VkBool32 depthBounds; + VkBool32 wideLines; + VkBool32 largePoints; + VkBool32 alphaToOne; + VkBool32 multiViewport; + VkBool32 samplerAnisotropy; + VkBool32 textureCompressionETC2; + VkBool32 textureCompressionASTC_LDR; + VkBool32 textureCompressionBC; + VkBool32 occlusionQueryPrecise; + VkBool32 pipelineStatisticsQuery; + VkBool32 vertexPipelineStoresAndAtomics; + VkBool32 fragmentStoresAndAtomics; + VkBool32 shaderTessellationAndGeometryPointSize; + VkBool32 shaderImageGatherExtended; + VkBool32 shaderStorageImageExtendedFormats; + VkBool32 shaderStorageImageMultisample; + VkBool32 shaderStorageImageReadWithoutFormat; + VkBool32 shaderStorageImageWriteWithoutFormat; + VkBool32 shaderUniformBufferArrayDynamicIndexing; + VkBool32 shaderSampledImageArrayDynamicIndexing; + VkBool32 shaderStorageBufferArrayDynamicIndexing; + VkBool32 shaderStorageImageArrayDynamicIndexing; + VkBool32 shaderClipDistance; + VkBool32 shaderCullDistance; + VkBool32 shaderFloat64; + VkBool32 shaderInt64; + VkBool32 shaderInt16; + VkBool32 shaderResourceResidency; + VkBool32 shaderResourceMinLod; + VkBool32 sparseBinding; + VkBool32 sparseResidencyBuffer; + VkBool32 sparseResidencyImage2D; + VkBool32 sparseResidencyImage3D; + VkBool32 sparseResidency2Samples; + VkBool32 sparseResidency4Samples; + 
VkBool32 sparseResidency8Samples; + VkBool32 sparseResidency16Samples; + VkBool32 sparseResidencyAliased; + VkBool32 variableMultisampleRate; + VkBool32 inheritedQueries; +} VkPhysicalDeviceFeatures; + +typedef struct VkFormatProperties { + VkFormatFeatureFlags linearTilingFeatures; + VkFormatFeatureFlags optimalTilingFeatures; + VkFormatFeatureFlags bufferFeatures; +} VkFormatProperties; + +typedef struct VkExtent3D { + uint32_t width; + uint32_t height; + uint32_t depth; +} VkExtent3D; + +typedef struct VkImageFormatProperties { + VkExtent3D maxExtent; + uint32_t maxMipLevels; + uint32_t maxArrayLayers; + VkSampleCountFlags sampleCounts; + VkDeviceSize maxResourceSize; +} VkImageFormatProperties; + +typedef struct VkPhysicalDeviceLimits { + uint32_t maxImageDimension1D; + uint32_t maxImageDimension2D; + uint32_t maxImageDimension3D; + uint32_t maxImageDimensionCube; + uint32_t maxImageArrayLayers; + uint32_t maxTexelBufferElements; + uint32_t maxUniformBufferRange; + uint32_t maxStorageBufferRange; + uint32_t maxPushConstantsSize; + uint32_t maxMemoryAllocationCount; + uint32_t maxSamplerAllocationCount; + VkDeviceSize bufferImageGranularity; + VkDeviceSize sparseAddressSpaceSize; + uint32_t maxBoundDescriptorSets; + uint32_t maxPerStageDescriptorSamplers; + uint32_t maxPerStageDescriptorUniformBuffers; + uint32_t maxPerStageDescriptorStorageBuffers; + uint32_t maxPerStageDescriptorSampledImages; + uint32_t maxPerStageDescriptorStorageImages; + uint32_t maxPerStageDescriptorInputAttachments; + uint32_t maxPerStageResources; + uint32_t maxDescriptorSetSamplers; + uint32_t maxDescriptorSetUniformBuffers; + uint32_t maxDescriptorSetUniformBuffersDynamic; + uint32_t maxDescriptorSetStorageBuffers; + uint32_t maxDescriptorSetStorageBuffersDynamic; + uint32_t maxDescriptorSetSampledImages; + uint32_t maxDescriptorSetStorageImages; + uint32_t maxDescriptorSetInputAttachments; + uint32_t maxVertexInputAttributes; + uint32_t maxVertexInputBindings; + uint32_t maxVertexInputAttributeOffset; + uint32_t maxVertexInputBindingStride; + uint32_t maxVertexOutputComponents; + uint32_t maxTessellationGenerationLevel; + uint32_t maxTessellationPatchSize; + uint32_t maxTessellationControlPerVertexInputComponents; + uint32_t maxTessellationControlPerVertexOutputComponents; + uint32_t maxTessellationControlPerPatchOutputComponents; + uint32_t maxTessellationControlTotalOutputComponents; + uint32_t maxTessellationEvaluationInputComponents; + uint32_t maxTessellationEvaluationOutputComponents; + uint32_t maxGeometryShaderInvocations; + uint32_t maxGeometryInputComponents; + uint32_t maxGeometryOutputComponents; + uint32_t maxGeometryOutputVertices; + uint32_t maxGeometryTotalOutputComponents; + uint32_t maxFragmentInputComponents; + uint32_t maxFragmentOutputAttachments; + uint32_t maxFragmentDualSrcAttachments; + uint32_t maxFragmentCombinedOutputResources; + uint32_t maxComputeSharedMemorySize; + uint32_t maxComputeWorkGroupCount[3]; + uint32_t maxComputeWorkGroupInvocations; + uint32_t maxComputeWorkGroupSize[3]; + uint32_t subPixelPrecisionBits; + uint32_t subTexelPrecisionBits; + uint32_t mipmapPrecisionBits; + uint32_t maxDrawIndexedIndexValue; + uint32_t maxDrawIndirectCount; + float maxSamplerLodBias; + float maxSamplerAnisotropy; + uint32_t maxViewports; + uint32_t maxViewportDimensions[2]; + float viewportBoundsRange[2]; + uint32_t viewportSubPixelBits; + size_t minMemoryMapAlignment; + VkDeviceSize minTexelBufferOffsetAlignment; + VkDeviceSize minUniformBufferOffsetAlignment; + VkDeviceSize 
minStorageBufferOffsetAlignment; + int32_t minTexelOffset; + uint32_t maxTexelOffset; + int32_t minTexelGatherOffset; + uint32_t maxTexelGatherOffset; + float minInterpolationOffset; + float maxInterpolationOffset; + uint32_t subPixelInterpolationOffsetBits; + uint32_t maxFramebufferWidth; + uint32_t maxFramebufferHeight; + uint32_t maxFramebufferLayers; + VkSampleCountFlags framebufferColorSampleCounts; + VkSampleCountFlags framebufferDepthSampleCounts; + VkSampleCountFlags framebufferStencilSampleCounts; + VkSampleCountFlags framebufferNoAttachmentsSampleCounts; + uint32_t maxColorAttachments; + VkSampleCountFlags sampledImageColorSampleCounts; + VkSampleCountFlags sampledImageIntegerSampleCounts; + VkSampleCountFlags sampledImageDepthSampleCounts; + VkSampleCountFlags sampledImageStencilSampleCounts; + VkSampleCountFlags storageImageSampleCounts; + uint32_t maxSampleMaskWords; + VkBool32 timestampComputeAndGraphics; + float timestampPeriod; + uint32_t maxClipDistances; + uint32_t maxCullDistances; + uint32_t maxCombinedClipAndCullDistances; + uint32_t discreteQueuePriorities; + float pointSizeRange[2]; + float lineWidthRange[2]; + float pointSizeGranularity; + float lineWidthGranularity; + VkBool32 strictLines; + VkBool32 standardSampleLocations; + VkDeviceSize optimalBufferCopyOffsetAlignment; + VkDeviceSize optimalBufferCopyRowPitchAlignment; + VkDeviceSize nonCoherentAtomSize; +} VkPhysicalDeviceLimits; + +typedef struct VkPhysicalDeviceSparseProperties { + VkBool32 residencyStandard2DBlockShape; + VkBool32 residencyStandard2DMultisampleBlockShape; + VkBool32 residencyStandard3DBlockShape; + VkBool32 residencyAlignedMipSize; + VkBool32 residencyNonResidentStrict; +} VkPhysicalDeviceSparseProperties; + +typedef struct VkPhysicalDeviceProperties { + uint32_t apiVersion; + uint32_t driverVersion; + uint32_t vendorID; + uint32_t deviceID; + VkPhysicalDeviceType deviceType; + char deviceName[VK_MAX_PHYSICAL_DEVICE_NAME_SIZE]; + uint8_t pipelineCacheUUID[VK_UUID_SIZE]; + VkPhysicalDeviceLimits limits; + VkPhysicalDeviceSparseProperties sparseProperties; +} VkPhysicalDeviceProperties; + +typedef struct VkQueueFamilyProperties { + VkQueueFlags queueFlags; + uint32_t queueCount; + uint32_t timestampValidBits; + VkExtent3D minImageTransferGranularity; +} VkQueueFamilyProperties; + +typedef struct VkMemoryType { + VkMemoryPropertyFlags propertyFlags; + uint32_t heapIndex; +} VkMemoryType; + +typedef struct VkMemoryHeap { + VkDeviceSize size; + VkMemoryHeapFlags flags; +} VkMemoryHeap; + +typedef struct VkPhysicalDeviceMemoryProperties { + uint32_t memoryTypeCount; + VkMemoryType memoryTypes[VK_MAX_MEMORY_TYPES]; + uint32_t memoryHeapCount; + VkMemoryHeap memoryHeaps[VK_MAX_MEMORY_HEAPS]; +} VkPhysicalDeviceMemoryProperties; + +typedef void (VKAPI_PTR *PFN_vkVoidFunction)(void); +typedef struct VkDeviceQueueCreateInfo { + VkStructureType sType; + const void* pNext; + VkDeviceQueueCreateFlags flags; + uint32_t queueFamilyIndex; + uint32_t queueCount; + const float* pQueuePriorities; +} VkDeviceQueueCreateInfo; + +typedef struct VkDeviceCreateInfo { + VkStructureType sType; + const void* pNext; + VkDeviceCreateFlags flags; + uint32_t queueCreateInfoCount; + const VkDeviceQueueCreateInfo* pQueueCreateInfos; + uint32_t enabledLayerCount; + const char* const* ppEnabledLayerNames; + uint32_t enabledExtensionCount; + const char* const* ppEnabledExtensionNames; + const VkPhysicalDeviceFeatures* pEnabledFeatures; +} VkDeviceCreateInfo; + +typedef struct VkExtensionProperties { + char 
extensionName[VK_MAX_EXTENSION_NAME_SIZE]; + uint32_t specVersion; +} VkExtensionProperties; + +typedef struct VkLayerProperties { + char layerName[VK_MAX_EXTENSION_NAME_SIZE]; + uint32_t specVersion; + uint32_t implementationVersion; + char description[VK_MAX_DESCRIPTION_SIZE]; +} VkLayerProperties; + +typedef struct VkSubmitInfo { + VkStructureType sType; + const void* pNext; + uint32_t waitSemaphoreCount; + const VkSemaphore* pWaitSemaphores; + const VkPipelineStageFlags* pWaitDstStageMask; + uint32_t commandBufferCount; + const VkCommandBuffer* pCommandBuffers; + uint32_t signalSemaphoreCount; + const VkSemaphore* pSignalSemaphores; +} VkSubmitInfo; + +typedef struct VkMemoryAllocateInfo { + VkStructureType sType; + const void* pNext; + VkDeviceSize allocationSize; + uint32_t memoryTypeIndex; +} VkMemoryAllocateInfo; + +typedef struct VkMappedMemoryRange { + VkStructureType sType; + const void* pNext; + VkDeviceMemory memory; + VkDeviceSize offset; + VkDeviceSize size; +} VkMappedMemoryRange; + +typedef struct VkMemoryRequirements { + VkDeviceSize size; + VkDeviceSize alignment; + uint32_t memoryTypeBits; +} VkMemoryRequirements; + +typedef struct VkSparseImageFormatProperties { + VkImageAspectFlags aspectMask; + VkExtent3D imageGranularity; + VkSparseImageFormatFlags flags; +} VkSparseImageFormatProperties; + +typedef struct VkSparseImageMemoryRequirements { + VkSparseImageFormatProperties formatProperties; + uint32_t imageMipTailFirstLod; + VkDeviceSize imageMipTailSize; + VkDeviceSize imageMipTailOffset; + VkDeviceSize imageMipTailStride; +} VkSparseImageMemoryRequirements; + +typedef struct VkSparseMemoryBind { + VkDeviceSize resourceOffset; + VkDeviceSize size; + VkDeviceMemory memory; + VkDeviceSize memoryOffset; + VkSparseMemoryBindFlags flags; +} VkSparseMemoryBind; + +typedef struct VkSparseBufferMemoryBindInfo { + VkBuffer buffer; + uint32_t bindCount; + const VkSparseMemoryBind* pBinds; +} VkSparseBufferMemoryBindInfo; + +typedef struct VkSparseImageOpaqueMemoryBindInfo { + VkImage image; + uint32_t bindCount; + const VkSparseMemoryBind* pBinds; +} VkSparseImageOpaqueMemoryBindInfo; + +typedef struct VkImageSubresource { + VkImageAspectFlags aspectMask; + uint32_t mipLevel; + uint32_t arrayLayer; +} VkImageSubresource; + +typedef struct VkOffset3D { + int32_t x; + int32_t y; + int32_t z; +} VkOffset3D; + +typedef struct VkSparseImageMemoryBind { + VkImageSubresource subresource; + VkOffset3D offset; + VkExtent3D extent; + VkDeviceMemory memory; + VkDeviceSize memoryOffset; + VkSparseMemoryBindFlags flags; +} VkSparseImageMemoryBind; + +typedef struct VkSparseImageMemoryBindInfo { + VkImage image; + uint32_t bindCount; + const VkSparseImageMemoryBind* pBinds; +} VkSparseImageMemoryBindInfo; + +typedef struct VkBindSparseInfo { + VkStructureType sType; + const void* pNext; + uint32_t waitSemaphoreCount; + const VkSemaphore* pWaitSemaphores; + uint32_t bufferBindCount; + const VkSparseBufferMemoryBindInfo* pBufferBinds; + uint32_t imageOpaqueBindCount; + const VkSparseImageOpaqueMemoryBindInfo* pImageOpaqueBinds; + uint32_t imageBindCount; + const VkSparseImageMemoryBindInfo* pImageBinds; + uint32_t signalSemaphoreCount; + const VkSemaphore* pSignalSemaphores; +} VkBindSparseInfo; + +typedef struct VkFenceCreateInfo { + VkStructureType sType; + const void* pNext; + VkFenceCreateFlags flags; +} VkFenceCreateInfo; + +typedef struct VkSemaphoreCreateInfo { + VkStructureType sType; + const void* pNext; + VkSemaphoreCreateFlags flags; +} VkSemaphoreCreateInfo; + +typedef struct 
VkEventCreateInfo { + VkStructureType sType; + const void* pNext; + VkEventCreateFlags flags; +} VkEventCreateInfo; + +typedef struct VkQueryPoolCreateInfo { + VkStructureType sType; + const void* pNext; + VkQueryPoolCreateFlags flags; + VkQueryType queryType; + uint32_t queryCount; + VkQueryPipelineStatisticFlags pipelineStatistics; +} VkQueryPoolCreateInfo; + +typedef struct VkBufferCreateInfo { + VkStructureType sType; + const void* pNext; + VkBufferCreateFlags flags; + VkDeviceSize size; + VkBufferUsageFlags usage; + VkSharingMode sharingMode; + uint32_t queueFamilyIndexCount; + const uint32_t* pQueueFamilyIndices; +} VkBufferCreateInfo; + +typedef struct VkBufferViewCreateInfo { + VkStructureType sType; + const void* pNext; + VkBufferViewCreateFlags flags; + VkBuffer buffer; + VkFormat format; + VkDeviceSize offset; + VkDeviceSize range; +} VkBufferViewCreateInfo; + +typedef struct VkImageCreateInfo { + VkStructureType sType; + const void* pNext; + VkImageCreateFlags flags; + VkImageType imageType; + VkFormat format; + VkExtent3D extent; + uint32_t mipLevels; + uint32_t arrayLayers; + VkSampleCountFlagBits samples; + VkImageTiling tiling; + VkImageUsageFlags usage; + VkSharingMode sharingMode; + uint32_t queueFamilyIndexCount; + const uint32_t* pQueueFamilyIndices; + VkImageLayout initialLayout; +} VkImageCreateInfo; + +typedef struct VkSubresourceLayout { + VkDeviceSize offset; + VkDeviceSize size; + VkDeviceSize rowPitch; + VkDeviceSize arrayPitch; + VkDeviceSize depthPitch; +} VkSubresourceLayout; + +typedef struct VkComponentMapping { + VkComponentSwizzle r; + VkComponentSwizzle g; + VkComponentSwizzle b; + VkComponentSwizzle a; +} VkComponentMapping; + +typedef struct VkImageSubresourceRange { + VkImageAspectFlags aspectMask; + uint32_t baseMipLevel; + uint32_t levelCount; + uint32_t baseArrayLayer; + uint32_t layerCount; +} VkImageSubresourceRange; + +typedef struct VkImageViewCreateInfo { + VkStructureType sType; + const void* pNext; + VkImageViewCreateFlags flags; + VkImage image; + VkImageViewType viewType; + VkFormat format; + VkComponentMapping components; + VkImageSubresourceRange subresourceRange; +} VkImageViewCreateInfo; + +typedef struct VkShaderModuleCreateInfo { + VkStructureType sType; + const void* pNext; + VkShaderModuleCreateFlags flags; + size_t codeSize; + const uint32_t* pCode; +} VkShaderModuleCreateInfo; + +typedef struct VkPipelineCacheCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineCacheCreateFlags flags; + size_t initialDataSize; + const void* pInitialData; +} VkPipelineCacheCreateInfo; + +typedef struct VkSpecializationMapEntry { + uint32_t constantID; + uint32_t offset; + size_t size; +} VkSpecializationMapEntry; + +typedef struct VkSpecializationInfo { + uint32_t mapEntryCount; + const VkSpecializationMapEntry* pMapEntries; + size_t dataSize; + const void* pData; +} VkSpecializationInfo; + +typedef struct VkPipelineShaderStageCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineShaderStageCreateFlags flags; + VkShaderStageFlagBits stage; + VkShaderModule module; + const char* pName; + const VkSpecializationInfo* pSpecializationInfo; +} VkPipelineShaderStageCreateInfo; + +typedef struct VkVertexInputBindingDescription { + uint32_t binding; + uint32_t stride; + VkVertexInputRate inputRate; +} VkVertexInputBindingDescription; + +typedef struct VkVertexInputAttributeDescription { + uint32_t location; + uint32_t binding; + VkFormat format; + uint32_t offset; +} VkVertexInputAttributeDescription; + +typedef struct 
VkPipelineVertexInputStateCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineVertexInputStateCreateFlags flags; + uint32_t vertexBindingDescriptionCount; + const VkVertexInputBindingDescription* pVertexBindingDescriptions; + uint32_t vertexAttributeDescriptionCount; + const VkVertexInputAttributeDescription* pVertexAttributeDescriptions; +} VkPipelineVertexInputStateCreateInfo; + +typedef struct VkPipelineInputAssemblyStateCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineInputAssemblyStateCreateFlags flags; + VkPrimitiveTopology topology; + VkBool32 primitiveRestartEnable; +} VkPipelineInputAssemblyStateCreateInfo; + +typedef struct VkPipelineTessellationStateCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineTessellationStateCreateFlags flags; + uint32_t patchControlPoints; +} VkPipelineTessellationStateCreateInfo; + +typedef struct VkViewport { + float x; + float y; + float width; + float height; + float minDepth; + float maxDepth; +} VkViewport; + +typedef struct VkOffset2D { + int32_t x; + int32_t y; +} VkOffset2D; + +typedef struct VkExtent2D { + uint32_t width; + uint32_t height; +} VkExtent2D; + +typedef struct VkRect2D { + VkOffset2D offset; + VkExtent2D extent; +} VkRect2D; + +typedef struct VkPipelineViewportStateCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineViewportStateCreateFlags flags; + uint32_t viewportCount; + const VkViewport* pViewports; + uint32_t scissorCount; + const VkRect2D* pScissors; +} VkPipelineViewportStateCreateInfo; + +typedef struct VkPipelineRasterizationStateCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineRasterizationStateCreateFlags flags; + VkBool32 depthClampEnable; + VkBool32 rasterizerDiscardEnable; + VkPolygonMode polygonMode; + VkCullModeFlags cullMode; + VkFrontFace frontFace; + VkBool32 depthBiasEnable; + float depthBiasConstantFactor; + float depthBiasClamp; + float depthBiasSlopeFactor; + float lineWidth; +} VkPipelineRasterizationStateCreateInfo; + +typedef struct VkPipelineMultisampleStateCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineMultisampleStateCreateFlags flags; + VkSampleCountFlagBits rasterizationSamples; + VkBool32 sampleShadingEnable; + float minSampleShading; + const VkSampleMask* pSampleMask; + VkBool32 alphaToCoverageEnable; + VkBool32 alphaToOneEnable; +} VkPipelineMultisampleStateCreateInfo; + +typedef struct VkStencilOpState { + VkStencilOp failOp; + VkStencilOp passOp; + VkStencilOp depthFailOp; + VkCompareOp compareOp; + uint32_t compareMask; + uint32_t writeMask; + uint32_t reference; +} VkStencilOpState; + +typedef struct VkPipelineDepthStencilStateCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineDepthStencilStateCreateFlags flags; + VkBool32 depthTestEnable; + VkBool32 depthWriteEnable; + VkCompareOp depthCompareOp; + VkBool32 depthBoundsTestEnable; + VkBool32 stencilTestEnable; + VkStencilOpState front; + VkStencilOpState back; + float minDepthBounds; + float maxDepthBounds; +} VkPipelineDepthStencilStateCreateInfo; + +typedef struct VkPipelineColorBlendAttachmentState { + VkBool32 blendEnable; + VkBlendFactor srcColorBlendFactor; + VkBlendFactor dstColorBlendFactor; + VkBlendOp colorBlendOp; + VkBlendFactor srcAlphaBlendFactor; + VkBlendFactor dstAlphaBlendFactor; + VkBlendOp alphaBlendOp; + VkColorComponentFlags colorWriteMask; +} VkPipelineColorBlendAttachmentState; + +typedef struct VkPipelineColorBlendStateCreateInfo { + VkStructureType sType; + const void* 
pNext; + VkPipelineColorBlendStateCreateFlags flags; + VkBool32 logicOpEnable; + VkLogicOp logicOp; + uint32_t attachmentCount; + const VkPipelineColorBlendAttachmentState* pAttachments; + float blendConstants[4]; +} VkPipelineColorBlendStateCreateInfo; + +typedef struct VkPipelineDynamicStateCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineDynamicStateCreateFlags flags; + uint32_t dynamicStateCount; + const VkDynamicState* pDynamicStates; +} VkPipelineDynamicStateCreateInfo; + +typedef struct VkGraphicsPipelineCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineCreateFlags flags; + uint32_t stageCount; + const VkPipelineShaderStageCreateInfo* pStages; + const VkPipelineVertexInputStateCreateInfo* pVertexInputState; + const VkPipelineInputAssemblyStateCreateInfo* pInputAssemblyState; + const VkPipelineTessellationStateCreateInfo* pTessellationState; + const VkPipelineViewportStateCreateInfo* pViewportState; + const VkPipelineRasterizationStateCreateInfo* pRasterizationState; + const VkPipelineMultisampleStateCreateInfo* pMultisampleState; + const VkPipelineDepthStencilStateCreateInfo* pDepthStencilState; + const VkPipelineColorBlendStateCreateInfo* pColorBlendState; + const VkPipelineDynamicStateCreateInfo* pDynamicState; + VkPipelineLayout layout; + VkRenderPass renderPass; + uint32_t subpass; + VkPipeline basePipelineHandle; + int32_t basePipelineIndex; +} VkGraphicsPipelineCreateInfo; + +typedef struct VkComputePipelineCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineCreateFlags flags; + VkPipelineShaderStageCreateInfo stage; + VkPipelineLayout layout; + VkPipeline basePipelineHandle; + int32_t basePipelineIndex; +} VkComputePipelineCreateInfo; + +typedef struct VkPushConstantRange { + VkShaderStageFlags stageFlags; + uint32_t offset; + uint32_t size; +} VkPushConstantRange; + +typedef struct VkPipelineLayoutCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineLayoutCreateFlags flags; + uint32_t setLayoutCount; + const VkDescriptorSetLayout* pSetLayouts; + uint32_t pushConstantRangeCount; + const VkPushConstantRange* pPushConstantRanges; +} VkPipelineLayoutCreateInfo; + +typedef struct VkSamplerCreateInfo { + VkStructureType sType; + const void* pNext; + VkSamplerCreateFlags flags; + VkFilter magFilter; + VkFilter minFilter; + VkSamplerMipmapMode mipmapMode; + VkSamplerAddressMode addressModeU; + VkSamplerAddressMode addressModeV; + VkSamplerAddressMode addressModeW; + float mipLodBias; + VkBool32 anisotropyEnable; + float maxAnisotropy; + VkBool32 compareEnable; + VkCompareOp compareOp; + float minLod; + float maxLod; + VkBorderColor borderColor; + VkBool32 unnormalizedCoordinates; +} VkSamplerCreateInfo; + +typedef struct VkDescriptorSetLayoutBinding { + uint32_t binding; + VkDescriptorType descriptorType; + uint32_t descriptorCount; + VkShaderStageFlags stageFlags; + const VkSampler* pImmutableSamplers; +} VkDescriptorSetLayoutBinding; + +typedef struct VkDescriptorSetLayoutCreateInfo { + VkStructureType sType; + const void* pNext; + VkDescriptorSetLayoutCreateFlags flags; + uint32_t bindingCount; + const VkDescriptorSetLayoutBinding* pBindings; +} VkDescriptorSetLayoutCreateInfo; + +typedef struct VkDescriptorPoolSize { + VkDescriptorType type; + uint32_t descriptorCount; +} VkDescriptorPoolSize; + +typedef struct VkDescriptorPoolCreateInfo { + VkStructureType sType; + const void* pNext; + VkDescriptorPoolCreateFlags flags; + uint32_t maxSets; + uint32_t poolSizeCount; + const VkDescriptorPoolSize* 
pPoolSizes; +} VkDescriptorPoolCreateInfo; + +typedef struct VkDescriptorSetAllocateInfo { + VkStructureType sType; + const void* pNext; + VkDescriptorPool descriptorPool; + uint32_t descriptorSetCount; + const VkDescriptorSetLayout* pSetLayouts; +} VkDescriptorSetAllocateInfo; + +typedef struct VkDescriptorImageInfo { + VkSampler sampler; + VkImageView imageView; + VkImageLayout imageLayout; +} VkDescriptorImageInfo; + +typedef struct VkDescriptorBufferInfo { + VkBuffer buffer; + VkDeviceSize offset; + VkDeviceSize range; +} VkDescriptorBufferInfo; + +typedef struct VkWriteDescriptorSet { + VkStructureType sType; + const void* pNext; + VkDescriptorSet dstSet; + uint32_t dstBinding; + uint32_t dstArrayElement; + uint32_t descriptorCount; + VkDescriptorType descriptorType; + const VkDescriptorImageInfo* pImageInfo; + const VkDescriptorBufferInfo* pBufferInfo; + const VkBufferView* pTexelBufferView; +} VkWriteDescriptorSet; + +typedef struct VkCopyDescriptorSet { + VkStructureType sType; + const void* pNext; + VkDescriptorSet srcSet; + uint32_t srcBinding; + uint32_t srcArrayElement; + VkDescriptorSet dstSet; + uint32_t dstBinding; + uint32_t dstArrayElement; + uint32_t descriptorCount; +} VkCopyDescriptorSet; + +typedef struct VkFramebufferCreateInfo { + VkStructureType sType; + const void* pNext; + VkFramebufferCreateFlags flags; + VkRenderPass renderPass; + uint32_t attachmentCount; + const VkImageView* pAttachments; + uint32_t width; + uint32_t height; + uint32_t layers; +} VkFramebufferCreateInfo; + +typedef struct VkAttachmentDescription { + VkAttachmentDescriptionFlags flags; + VkFormat format; + VkSampleCountFlagBits samples; + VkAttachmentLoadOp loadOp; + VkAttachmentStoreOp storeOp; + VkAttachmentLoadOp stencilLoadOp; + VkAttachmentStoreOp stencilStoreOp; + VkImageLayout initialLayout; + VkImageLayout finalLayout; +} VkAttachmentDescription; + +typedef struct VkAttachmentReference { + uint32_t attachment; + VkImageLayout layout; +} VkAttachmentReference; + +typedef struct VkSubpassDescription { + VkSubpassDescriptionFlags flags; + VkPipelineBindPoint pipelineBindPoint; + uint32_t inputAttachmentCount; + const VkAttachmentReference* pInputAttachments; + uint32_t colorAttachmentCount; + const VkAttachmentReference* pColorAttachments; + const VkAttachmentReference* pResolveAttachments; + const VkAttachmentReference* pDepthStencilAttachment; + uint32_t preserveAttachmentCount; + const uint32_t* pPreserveAttachments; +} VkSubpassDescription; + +typedef struct VkSubpassDependency { + uint32_t srcSubpass; + uint32_t dstSubpass; + VkPipelineStageFlags srcStageMask; + VkPipelineStageFlags dstStageMask; + VkAccessFlags srcAccessMask; + VkAccessFlags dstAccessMask; + VkDependencyFlags dependencyFlags; +} VkSubpassDependency; + +typedef struct VkRenderPassCreateInfo { + VkStructureType sType; + const void* pNext; + VkRenderPassCreateFlags flags; + uint32_t attachmentCount; + const VkAttachmentDescription* pAttachments; + uint32_t subpassCount; + const VkSubpassDescription* pSubpasses; + uint32_t dependencyCount; + const VkSubpassDependency* pDependencies; +} VkRenderPassCreateInfo; + +typedef struct VkCommandPoolCreateInfo { + VkStructureType sType; + const void* pNext; + VkCommandPoolCreateFlags flags; + uint32_t queueFamilyIndex; +} VkCommandPoolCreateInfo; + +typedef struct VkCommandBufferAllocateInfo { + VkStructureType sType; + const void* pNext; + VkCommandPool commandPool; + VkCommandBufferLevel level; + uint32_t commandBufferCount; +} VkCommandBufferAllocateInfo; + +typedef struct 
VkCommandBufferInheritanceInfo { + VkStructureType sType; + const void* pNext; + VkRenderPass renderPass; + uint32_t subpass; + VkFramebuffer framebuffer; + VkBool32 occlusionQueryEnable; + VkQueryControlFlags queryFlags; + VkQueryPipelineStatisticFlags pipelineStatistics; +} VkCommandBufferInheritanceInfo; + +typedef struct VkCommandBufferBeginInfo { + VkStructureType sType; + const void* pNext; + VkCommandBufferUsageFlags flags; + const VkCommandBufferInheritanceInfo* pInheritanceInfo; +} VkCommandBufferBeginInfo; + +typedef struct VkBufferCopy { + VkDeviceSize srcOffset; + VkDeviceSize dstOffset; + VkDeviceSize size; +} VkBufferCopy; + +typedef struct VkImageSubresourceLayers { + VkImageAspectFlags aspectMask; + uint32_t mipLevel; + uint32_t baseArrayLayer; + uint32_t layerCount; +} VkImageSubresourceLayers; + +typedef struct VkImageCopy { + VkImageSubresourceLayers srcSubresource; + VkOffset3D srcOffset; + VkImageSubresourceLayers dstSubresource; + VkOffset3D dstOffset; + VkExtent3D extent; +} VkImageCopy; + +typedef struct VkImageBlit { + VkImageSubresourceLayers srcSubresource; + VkOffset3D srcOffsets[2]; + VkImageSubresourceLayers dstSubresource; + VkOffset3D dstOffsets[2]; +} VkImageBlit; + +typedef struct VkBufferImageCopy { + VkDeviceSize bufferOffset; + uint32_t bufferRowLength; + uint32_t bufferImageHeight; + VkImageSubresourceLayers imageSubresource; + VkOffset3D imageOffset; + VkExtent3D imageExtent; +} VkBufferImageCopy; + +typedef union VkClearColorValue { + float float32[4]; + int32_t int32[4]; + uint32_t uint32[4]; +} VkClearColorValue; + +typedef struct VkClearDepthStencilValue { + float depth; + uint32_t stencil; +} VkClearDepthStencilValue; + +typedef union VkClearValue { + VkClearColorValue color; + VkClearDepthStencilValue depthStencil; +} VkClearValue; + +typedef struct VkClearAttachment { + VkImageAspectFlags aspectMask; + uint32_t colorAttachment; + VkClearValue clearValue; +} VkClearAttachment; + +typedef struct VkClearRect { + VkRect2D rect; + uint32_t baseArrayLayer; + uint32_t layerCount; +} VkClearRect; + +typedef struct VkImageResolve { + VkImageSubresourceLayers srcSubresource; + VkOffset3D srcOffset; + VkImageSubresourceLayers dstSubresource; + VkOffset3D dstOffset; + VkExtent3D extent; +} VkImageResolve; + +typedef struct VkMemoryBarrier { + VkStructureType sType; + const void* pNext; + VkAccessFlags srcAccessMask; + VkAccessFlags dstAccessMask; +} VkMemoryBarrier; + +typedef struct VkBufferMemoryBarrier { + VkStructureType sType; + const void* pNext; + VkAccessFlags srcAccessMask; + VkAccessFlags dstAccessMask; + uint32_t srcQueueFamilyIndex; + uint32_t dstQueueFamilyIndex; + VkBuffer buffer; + VkDeviceSize offset; + VkDeviceSize size; +} VkBufferMemoryBarrier; + +typedef struct VkImageMemoryBarrier { + VkStructureType sType; + const void* pNext; + VkAccessFlags srcAccessMask; + VkAccessFlags dstAccessMask; + VkImageLayout oldLayout; + VkImageLayout newLayout; + uint32_t srcQueueFamilyIndex; + uint32_t dstQueueFamilyIndex; + VkImage image; + VkImageSubresourceRange subresourceRange; +} VkImageMemoryBarrier; + +typedef struct VkRenderPassBeginInfo { + VkStructureType sType; + const void* pNext; + VkRenderPass renderPass; + VkFramebuffer framebuffer; + VkRect2D renderArea; + uint32_t clearValueCount; + const VkClearValue* pClearValues; +} VkRenderPassBeginInfo; + +typedef struct VkDispatchIndirectCommand { + uint32_t x; + uint32_t y; + uint32_t z; +} VkDispatchIndirectCommand; + +typedef struct VkDrawIndexedIndirectCommand { + uint32_t indexCount; + 
uint32_t instanceCount; + uint32_t firstIndex; + int32_t vertexOffset; + uint32_t firstInstance; +} VkDrawIndexedIndirectCommand; + +typedef struct VkDrawIndirectCommand { + uint32_t vertexCount; + uint32_t instanceCount; + uint32_t firstVertex; + uint32_t firstInstance; +} VkDrawIndirectCommand; + +typedef struct VkBaseOutStructure { + VkStructureType sType; + struct VkBaseOutStructure* pNext; +} VkBaseOutStructure; + +typedef struct VkBaseInStructure { + VkStructureType sType; + const struct VkBaseInStructure* pNext; +} VkBaseInStructure; + + +typedef VkResult (VKAPI_PTR *PFN_vkCreateInstance)(const VkInstanceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkInstance* pInstance); +typedef void (VKAPI_PTR *PFN_vkDestroyInstance)(VkInstance instance, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkEnumeratePhysicalDevices)(VkInstance instance, uint32_t* pPhysicalDeviceCount, VkPhysicalDevice* pPhysicalDevices); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceFeatures)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures* pFeatures); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceFormatProperties)(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties* pFormatProperties); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceImageFormatProperties)(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkImageFormatProperties* pImageFormatProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceProperties)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties* pProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceQueueFamilyProperties)(VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties* pQueueFamilyProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceMemoryProperties)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties* pMemoryProperties); +typedef PFN_vkVoidFunction (VKAPI_PTR *PFN_vkGetInstanceProcAddr)(VkInstance instance, const char* pName); +typedef PFN_vkVoidFunction (VKAPI_PTR *PFN_vkGetDeviceProcAddr)(VkDevice device, const char* pName); +typedef VkResult (VKAPI_PTR *PFN_vkCreateDevice)(VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDevice* pDevice); +typedef void (VKAPI_PTR *PFN_vkDestroyDevice)(VkDevice device, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkEnumerateInstanceExtensionProperties)(const char* pLayerName, uint32_t* pPropertyCount, VkExtensionProperties* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkEnumerateDeviceExtensionProperties)(VkPhysicalDevice physicalDevice, const char* pLayerName, uint32_t* pPropertyCount, VkExtensionProperties* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkEnumerateInstanceLayerProperties)(uint32_t* pPropertyCount, VkLayerProperties* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkEnumerateDeviceLayerProperties)(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkLayerProperties* pProperties); +typedef void (VKAPI_PTR *PFN_vkGetDeviceQueue)(VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex, VkQueue* pQueue); +typedef VkResult (VKAPI_PTR *PFN_vkQueueSubmit)(VkQueue queue, uint32_t submitCount, const VkSubmitInfo* pSubmits, VkFence fence); +typedef VkResult (VKAPI_PTR *PFN_vkQueueWaitIdle)(VkQueue queue); +typedef VkResult (VKAPI_PTR *PFN_vkDeviceWaitIdle)(VkDevice device); 
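[Editor's note, illustrative only — not part of the patched header.] The PFN_* typedefs above let an application resolve every entry point at run time instead of linking the Vulkan loader directly. A minimal sketch, assuming a PFN_vkGetInstanceProcAddr has already been obtained elsewhere (for example via dlopen/GetProcAddress); the function and variable names are illustrative and not defined by this header.

#include <stddef.h>
#include "vulkan.h"

/* Hypothetical helper: create a VkInstance using only the typedefs from this
 * header. `gipa` is assumed to have been loaded from the Vulkan loader. */
static VkInstance create_instance(PFN_vkGetInstanceProcAddr gipa)
{
    /* Global commands may be queried with a NULL instance. */
    PFN_vkCreateInstance pfnCreateInstance =
        (PFN_vkCreateInstance)gipa(VK_NULL_HANDLE, "vkCreateInstance");

    VkApplicationInfo app = {
        .sType = VK_STRUCTURE_TYPE_APPLICATION_INFO,
        .pApplicationName = "example",
        .apiVersion = VK_API_VERSION_1_0,
    };
    VkInstanceCreateInfo info = {
        .sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO,
        .pApplicationInfo = &app,
    };

    VkInstance instance = VK_NULL_HANDLE;
    if (pfnCreateInstance == NULL ||
        pfnCreateInstance(&info, NULL, &instance) != VK_SUCCESS) {
        return VK_NULL_HANDLE;
    }
    return instance;
}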
+typedef VkResult (VKAPI_PTR *PFN_vkAllocateMemory)(VkDevice device, const VkMemoryAllocateInfo* pAllocateInfo, const VkAllocationCallbacks* pAllocator, VkDeviceMemory* pMemory); +typedef void (VKAPI_PTR *PFN_vkFreeMemory)(VkDevice device, VkDeviceMemory memory, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkMapMemory)(VkDevice device, VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags, void** ppData); +typedef void (VKAPI_PTR *PFN_vkUnmapMemory)(VkDevice device, VkDeviceMemory memory); +typedef VkResult (VKAPI_PTR *PFN_vkFlushMappedMemoryRanges)(VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange* pMemoryRanges); +typedef VkResult (VKAPI_PTR *PFN_vkInvalidateMappedMemoryRanges)(VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange* pMemoryRanges); +typedef void (VKAPI_PTR *PFN_vkGetDeviceMemoryCommitment)(VkDevice device, VkDeviceMemory memory, VkDeviceSize* pCommittedMemoryInBytes); +typedef VkResult (VKAPI_PTR *PFN_vkBindBufferMemory)(VkDevice device, VkBuffer buffer, VkDeviceMemory memory, VkDeviceSize memoryOffset); +typedef VkResult (VKAPI_PTR *PFN_vkBindImageMemory)(VkDevice device, VkImage image, VkDeviceMemory memory, VkDeviceSize memoryOffset); +typedef void (VKAPI_PTR *PFN_vkGetBufferMemoryRequirements)(VkDevice device, VkBuffer buffer, VkMemoryRequirements* pMemoryRequirements); +typedef void (VKAPI_PTR *PFN_vkGetImageMemoryRequirements)(VkDevice device, VkImage image, VkMemoryRequirements* pMemoryRequirements); +typedef void (VKAPI_PTR *PFN_vkGetImageSparseMemoryRequirements)(VkDevice device, VkImage image, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements* pSparseMemoryRequirements); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceSparseImageFormatProperties)(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkSampleCountFlagBits samples, VkImageUsageFlags usage, VkImageTiling tiling, uint32_t* pPropertyCount, VkSparseImageFormatProperties* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkQueueBindSparse)(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo* pBindInfo, VkFence fence); +typedef VkResult (VKAPI_PTR *PFN_vkCreateFence)(VkDevice device, const VkFenceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence); +typedef void (VKAPI_PTR *PFN_vkDestroyFence)(VkDevice device, VkFence fence, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkResetFences)(VkDevice device, uint32_t fenceCount, const VkFence* pFences); +typedef VkResult (VKAPI_PTR *PFN_vkGetFenceStatus)(VkDevice device, VkFence fence); +typedef VkResult (VKAPI_PTR *PFN_vkWaitForFences)(VkDevice device, uint32_t fenceCount, const VkFence* pFences, VkBool32 waitAll, uint64_t timeout); +typedef VkResult (VKAPI_PTR *PFN_vkCreateSemaphore)(VkDevice device, const VkSemaphoreCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSemaphore* pSemaphore); +typedef void (VKAPI_PTR *PFN_vkDestroySemaphore)(VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkCreateEvent)(VkDevice device, const VkEventCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkEvent* pEvent); +typedef void (VKAPI_PTR *PFN_vkDestroyEvent)(VkDevice device, VkEvent event, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkGetEventStatus)(VkDevice device, VkEvent event); +typedef VkResult (VKAPI_PTR *PFN_vkSetEvent)(VkDevice 
device, VkEvent event); +typedef VkResult (VKAPI_PTR *PFN_vkResetEvent)(VkDevice device, VkEvent event); +typedef VkResult (VKAPI_PTR *PFN_vkCreateQueryPool)(VkDevice device, const VkQueryPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkQueryPool* pQueryPool); +typedef void (VKAPI_PTR *PFN_vkDestroyQueryPool)(VkDevice device, VkQueryPool queryPool, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkGetQueryPoolResults)(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void* pData, VkDeviceSize stride, VkQueryResultFlags flags); +typedef VkResult (VKAPI_PTR *PFN_vkCreateBuffer)(VkDevice device, const VkBufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBuffer* pBuffer); +typedef void (VKAPI_PTR *PFN_vkDestroyBuffer)(VkDevice device, VkBuffer buffer, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkCreateBufferView)(VkDevice device, const VkBufferViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBufferView* pView); +typedef void (VKAPI_PTR *PFN_vkDestroyBufferView)(VkDevice device, VkBufferView bufferView, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkCreateImage)(VkDevice device, const VkImageCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkImage* pImage); +typedef void (VKAPI_PTR *PFN_vkDestroyImage)(VkDevice device, VkImage image, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkGetImageSubresourceLayout)(VkDevice device, VkImage image, const VkImageSubresource* pSubresource, VkSubresourceLayout* pLayout); +typedef VkResult (VKAPI_PTR *PFN_vkCreateImageView)(VkDevice device, const VkImageViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkImageView* pView); +typedef void (VKAPI_PTR *PFN_vkDestroyImageView)(VkDevice device, VkImageView imageView, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkCreateShaderModule)(VkDevice device, const VkShaderModuleCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkShaderModule* pShaderModule); +typedef void (VKAPI_PTR *PFN_vkDestroyShaderModule)(VkDevice device, VkShaderModule shaderModule, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkCreatePipelineCache)(VkDevice device, const VkPipelineCacheCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPipelineCache* pPipelineCache); +typedef void (VKAPI_PTR *PFN_vkDestroyPipelineCache)(VkDevice device, VkPipelineCache pipelineCache, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkGetPipelineCacheData)(VkDevice device, VkPipelineCache pipelineCache, size_t* pDataSize, void* pData); +typedef VkResult (VKAPI_PTR *PFN_vkMergePipelineCaches)(VkDevice device, VkPipelineCache dstCache, uint32_t srcCacheCount, const VkPipelineCache* pSrcCaches); +typedef VkResult (VKAPI_PTR *PFN_vkCreateGraphicsPipelines)(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkGraphicsPipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines); +typedef VkResult (VKAPI_PTR *PFN_vkCreateComputePipelines)(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkComputePipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines); +typedef void (VKAPI_PTR *PFN_vkDestroyPipeline)(VkDevice device, VkPipeline pipeline, const 
VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkCreatePipelineLayout)(VkDevice device, const VkPipelineLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPipelineLayout* pPipelineLayout); +typedef void (VKAPI_PTR *PFN_vkDestroyPipelineLayout)(VkDevice device, VkPipelineLayout pipelineLayout, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkCreateSampler)(VkDevice device, const VkSamplerCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSampler* pSampler); +typedef void (VKAPI_PTR *PFN_vkDestroySampler)(VkDevice device, VkSampler sampler, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkCreateDescriptorSetLayout)(VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorSetLayout* pSetLayout); +typedef void (VKAPI_PTR *PFN_vkDestroyDescriptorSetLayout)(VkDevice device, VkDescriptorSetLayout descriptorSetLayout, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkCreateDescriptorPool)(VkDevice device, const VkDescriptorPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorPool* pDescriptorPool); +typedef void (VKAPI_PTR *PFN_vkDestroyDescriptorPool)(VkDevice device, VkDescriptorPool descriptorPool, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkResetDescriptorPool)(VkDevice device, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags); +typedef VkResult (VKAPI_PTR *PFN_vkAllocateDescriptorSets)(VkDevice device, const VkDescriptorSetAllocateInfo* pAllocateInfo, VkDescriptorSet* pDescriptorSets); +typedef VkResult (VKAPI_PTR *PFN_vkFreeDescriptorSets)(VkDevice device, VkDescriptorPool descriptorPool, uint32_t descriptorSetCount, const VkDescriptorSet* pDescriptorSets); +typedef void (VKAPI_PTR *PFN_vkUpdateDescriptorSets)(VkDevice device, uint32_t descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites, uint32_t descriptorCopyCount, const VkCopyDescriptorSet* pDescriptorCopies); +typedef VkResult (VKAPI_PTR *PFN_vkCreateFramebuffer)(VkDevice device, const VkFramebufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkFramebuffer* pFramebuffer); +typedef void (VKAPI_PTR *PFN_vkDestroyFramebuffer)(VkDevice device, VkFramebuffer framebuffer, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkCreateRenderPass)(VkDevice device, const VkRenderPassCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass); +typedef void (VKAPI_PTR *PFN_vkDestroyRenderPass)(VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkGetRenderAreaGranularity)(VkDevice device, VkRenderPass renderPass, VkExtent2D* pGranularity); +typedef VkResult (VKAPI_PTR *PFN_vkCreateCommandPool)(VkDevice device, const VkCommandPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkCommandPool* pCommandPool); +typedef void (VKAPI_PTR *PFN_vkDestroyCommandPool)(VkDevice device, VkCommandPool commandPool, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkResetCommandPool)(VkDevice device, VkCommandPool commandPool, VkCommandPoolResetFlags flags); +typedef VkResult (VKAPI_PTR *PFN_vkAllocateCommandBuffers)(VkDevice device, const VkCommandBufferAllocateInfo* pAllocateInfo, VkCommandBuffer* pCommandBuffers); +typedef void (VKAPI_PTR *PFN_vkFreeCommandBuffers)(VkDevice 
device, VkCommandPool commandPool, uint32_t commandBufferCount, const VkCommandBuffer* pCommandBuffers); +typedef VkResult (VKAPI_PTR *PFN_vkBeginCommandBuffer)(VkCommandBuffer commandBuffer, const VkCommandBufferBeginInfo* pBeginInfo); +typedef VkResult (VKAPI_PTR *PFN_vkEndCommandBuffer)(VkCommandBuffer commandBuffer); +typedef VkResult (VKAPI_PTR *PFN_vkResetCommandBuffer)(VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags); +typedef void (VKAPI_PTR *PFN_vkCmdBindPipeline)(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline); +typedef void (VKAPI_PTR *PFN_vkCmdSetViewport)(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewport* pViewports); +typedef void (VKAPI_PTR *PFN_vkCmdSetScissor)(VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount, const VkRect2D* pScissors); +typedef void (VKAPI_PTR *PFN_vkCmdSetLineWidth)(VkCommandBuffer commandBuffer, float lineWidth); +typedef void (VKAPI_PTR *PFN_vkCmdSetDepthBias)(VkCommandBuffer commandBuffer, float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor); +typedef void (VKAPI_PTR *PFN_vkCmdSetBlendConstants)(VkCommandBuffer commandBuffer, const float blendConstants[4]); +typedef void (VKAPI_PTR *PFN_vkCmdSetDepthBounds)(VkCommandBuffer commandBuffer, float minDepthBounds, float maxDepthBounds); +typedef void (VKAPI_PTR *PFN_vkCmdSetStencilCompareMask)(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t compareMask); +typedef void (VKAPI_PTR *PFN_vkCmdSetStencilWriteMask)(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t writeMask); +typedef void (VKAPI_PTR *PFN_vkCmdSetStencilReference)(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t reference); +typedef void (VKAPI_PTR *PFN_vkCmdBindDescriptorSets)(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const VkDescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets); +typedef void (VKAPI_PTR *PFN_vkCmdBindIndexBuffer)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkIndexType indexType); +typedef void (VKAPI_PTR *PFN_vkCmdBindVertexBuffers)(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets); +typedef void (VKAPI_PTR *PFN_vkCmdDraw)(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance); +typedef void (VKAPI_PTR *PFN_vkCmdDrawIndexed)(VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance); +typedef void (VKAPI_PTR *PFN_vkCmdDrawIndirect)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride); +typedef void (VKAPI_PTR *PFN_vkCmdDrawIndexedIndirect)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride); +typedef void (VKAPI_PTR *PFN_vkCmdDispatch)(VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ); +typedef void (VKAPI_PTR *PFN_vkCmdDispatchIndirect)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset); +typedef void (VKAPI_PTR *PFN_vkCmdCopyBuffer)(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferCopy* 
pRegions); +typedef void (VKAPI_PTR *PFN_vkCmdCopyImage)(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageCopy* pRegions); +typedef void (VKAPI_PTR *PFN_vkCmdBlitImage)(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageBlit* pRegions, VkFilter filter); +typedef void (VKAPI_PTR *PFN_vkCmdCopyBufferToImage)(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkBufferImageCopy* pRegions); +typedef void (VKAPI_PTR *PFN_vkCmdCopyImageToBuffer)(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferImageCopy* pRegions); +typedef void (VKAPI_PTR *PFN_vkCmdUpdateBuffer)(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize dataSize, const void* pData); +typedef void (VKAPI_PTR *PFN_vkCmdFillBuffer)(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize size, uint32_t data); +typedef void (VKAPI_PTR *PFN_vkCmdClearColorImage)(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearColorValue* pColor, uint32_t rangeCount, const VkImageSubresourceRange* pRanges); +typedef void (VKAPI_PTR *PFN_vkCmdClearDepthStencilImage)(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const VkImageSubresourceRange* pRanges); +typedef void (VKAPI_PTR *PFN_vkCmdClearAttachments)(VkCommandBuffer commandBuffer, uint32_t attachmentCount, const VkClearAttachment* pAttachments, uint32_t rectCount, const VkClearRect* pRects); +typedef void (VKAPI_PTR *PFN_vkCmdResolveImage)(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageResolve* pRegions); +typedef void (VKAPI_PTR *PFN_vkCmdSetEvent)(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask); +typedef void (VKAPI_PTR *PFN_vkCmdResetEvent)(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask); +typedef void (VKAPI_PTR *PFN_vkCmdWaitEvents)(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent* pEvents, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers); +typedef void (VKAPI_PTR *PFN_vkCmdPipelineBarrier)(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers); +typedef void (VKAPI_PTR *PFN_vkCmdBeginQuery)(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags); +typedef void (VKAPI_PTR *PFN_vkCmdEndQuery)(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query); +typedef void (VKAPI_PTR *PFN_vkCmdResetQueryPool)(VkCommandBuffer 
commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount); +typedef void (VKAPI_PTR *PFN_vkCmdWriteTimestamp)(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkQueryPool queryPool, uint32_t query); +typedef void (VKAPI_PTR *PFN_vkCmdCopyQueryPoolResults)(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize stride, VkQueryResultFlags flags); +typedef void (VKAPI_PTR *PFN_vkCmdPushConstants)(VkCommandBuffer commandBuffer, VkPipelineLayout layout, VkShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void* pValues); +typedef void (VKAPI_PTR *PFN_vkCmdBeginRenderPass)(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, VkSubpassContents contents); +typedef void (VKAPI_PTR *PFN_vkCmdNextSubpass)(VkCommandBuffer commandBuffer, VkSubpassContents contents); +typedef void (VKAPI_PTR *PFN_vkCmdEndRenderPass)(VkCommandBuffer commandBuffer); +typedef void (VKAPI_PTR *PFN_vkCmdExecuteCommands)(VkCommandBuffer commandBuffer, uint32_t commandBufferCount, const VkCommandBuffer* pCommandBuffers); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateInstance( + const VkInstanceCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkInstance* pInstance); + +VKAPI_ATTR void VKAPI_CALL vkDestroyInstance( + VkInstance instance, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkEnumeratePhysicalDevices( + VkInstance instance, + uint32_t* pPhysicalDeviceCount, + VkPhysicalDevice* pPhysicalDevices); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFeatures( + VkPhysicalDevice physicalDevice, + VkPhysicalDeviceFeatures* pFeatures); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFormatProperties( + VkPhysicalDevice physicalDevice, + VkFormat format, + VkFormatProperties* pFormatProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceImageFormatProperties( + VkPhysicalDevice physicalDevice, + VkFormat format, + VkImageType type, + VkImageTiling tiling, + VkImageUsageFlags usage, + VkImageCreateFlags flags, + VkImageFormatProperties* pImageFormatProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceProperties( + VkPhysicalDevice physicalDevice, + VkPhysicalDeviceProperties* pProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceQueueFamilyProperties( + VkPhysicalDevice physicalDevice, + uint32_t* pQueueFamilyPropertyCount, + VkQueueFamilyProperties* pQueueFamilyProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceMemoryProperties( + VkPhysicalDevice physicalDevice, + VkPhysicalDeviceMemoryProperties* pMemoryProperties); + +VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetInstanceProcAddr( + VkInstance instance, + const char* pName); + +VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetDeviceProcAddr( + VkDevice device, + const char* pName); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateDevice( + VkPhysicalDevice physicalDevice, + const VkDeviceCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkDevice* pDevice); + +VKAPI_ATTR void VKAPI_CALL vkDestroyDevice( + VkDevice device, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceExtensionProperties( + const char* pLayerName, + uint32_t* pPropertyCount, + VkExtensionProperties* pProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateDeviceExtensionProperties( + VkPhysicalDevice physicalDevice, + const char* pLayerName, + uint32_t* 
pPropertyCount, + VkExtensionProperties* pProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceLayerProperties( + uint32_t* pPropertyCount, + VkLayerProperties* pProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateDeviceLayerProperties( + VkPhysicalDevice physicalDevice, + uint32_t* pPropertyCount, + VkLayerProperties* pProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetDeviceQueue( + VkDevice device, + uint32_t queueFamilyIndex, + uint32_t queueIndex, + VkQueue* pQueue); + +VKAPI_ATTR VkResult VKAPI_CALL vkQueueSubmit( + VkQueue queue, + uint32_t submitCount, + const VkSubmitInfo* pSubmits, + VkFence fence); + +VKAPI_ATTR VkResult VKAPI_CALL vkQueueWaitIdle( + VkQueue queue); + +VKAPI_ATTR VkResult VKAPI_CALL vkDeviceWaitIdle( + VkDevice device); + +VKAPI_ATTR VkResult VKAPI_CALL vkAllocateMemory( + VkDevice device, + const VkMemoryAllocateInfo* pAllocateInfo, + const VkAllocationCallbacks* pAllocator, + VkDeviceMemory* pMemory); + +VKAPI_ATTR void VKAPI_CALL vkFreeMemory( + VkDevice device, + VkDeviceMemory memory, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkMapMemory( + VkDevice device, + VkDeviceMemory memory, + VkDeviceSize offset, + VkDeviceSize size, + VkMemoryMapFlags flags, + void** ppData); + +VKAPI_ATTR void VKAPI_CALL vkUnmapMemory( + VkDevice device, + VkDeviceMemory memory); + +VKAPI_ATTR VkResult VKAPI_CALL vkFlushMappedMemoryRanges( + VkDevice device, + uint32_t memoryRangeCount, + const VkMappedMemoryRange* pMemoryRanges); + +VKAPI_ATTR VkResult VKAPI_CALL vkInvalidateMappedMemoryRanges( + VkDevice device, + uint32_t memoryRangeCount, + const VkMappedMemoryRange* pMemoryRanges); + +VKAPI_ATTR void VKAPI_CALL vkGetDeviceMemoryCommitment( + VkDevice device, + VkDeviceMemory memory, + VkDeviceSize* pCommittedMemoryInBytes); + +VKAPI_ATTR VkResult VKAPI_CALL vkBindBufferMemory( + VkDevice device, + VkBuffer buffer, + VkDeviceMemory memory, + VkDeviceSize memoryOffset); + +VKAPI_ATTR VkResult VKAPI_CALL vkBindImageMemory( + VkDevice device, + VkImage image, + VkDeviceMemory memory, + VkDeviceSize memoryOffset); + +VKAPI_ATTR void VKAPI_CALL vkGetBufferMemoryRequirements( + VkDevice device, + VkBuffer buffer, + VkMemoryRequirements* pMemoryRequirements); + +VKAPI_ATTR void VKAPI_CALL vkGetImageMemoryRequirements( + VkDevice device, + VkImage image, + VkMemoryRequirements* pMemoryRequirements); + +VKAPI_ATTR void VKAPI_CALL vkGetImageSparseMemoryRequirements( + VkDevice device, + VkImage image, + uint32_t* pSparseMemoryRequirementCount, + VkSparseImageMemoryRequirements* pSparseMemoryRequirements); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceSparseImageFormatProperties( + VkPhysicalDevice physicalDevice, + VkFormat format, + VkImageType type, + VkSampleCountFlagBits samples, + VkImageUsageFlags usage, + VkImageTiling tiling, + uint32_t* pPropertyCount, + VkSparseImageFormatProperties* pProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkQueueBindSparse( + VkQueue queue, + uint32_t bindInfoCount, + const VkBindSparseInfo* pBindInfo, + VkFence fence); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateFence( + VkDevice device, + const VkFenceCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkFence* pFence); + +VKAPI_ATTR void VKAPI_CALL vkDestroyFence( + VkDevice device, + VkFence fence, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkResetFences( + VkDevice device, + uint32_t fenceCount, + const VkFence* pFences); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetFenceStatus( + VkDevice device, 
+ VkFence fence); + +VKAPI_ATTR VkResult VKAPI_CALL vkWaitForFences( + VkDevice device, + uint32_t fenceCount, + const VkFence* pFences, + VkBool32 waitAll, + uint64_t timeout); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateSemaphore( + VkDevice device, + const VkSemaphoreCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSemaphore* pSemaphore); + +VKAPI_ATTR void VKAPI_CALL vkDestroySemaphore( + VkDevice device, + VkSemaphore semaphore, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateEvent( + VkDevice device, + const VkEventCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkEvent* pEvent); + +VKAPI_ATTR void VKAPI_CALL vkDestroyEvent( + VkDevice device, + VkEvent event, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetEventStatus( + VkDevice device, + VkEvent event); + +VKAPI_ATTR VkResult VKAPI_CALL vkSetEvent( + VkDevice device, + VkEvent event); + +VKAPI_ATTR VkResult VKAPI_CALL vkResetEvent( + VkDevice device, + VkEvent event); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateQueryPool( + VkDevice device, + const VkQueryPoolCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkQueryPool* pQueryPool); + +VKAPI_ATTR void VKAPI_CALL vkDestroyQueryPool( + VkDevice device, + VkQueryPool queryPool, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetQueryPoolResults( + VkDevice device, + VkQueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + size_t dataSize, + void* pData, + VkDeviceSize stride, + VkQueryResultFlags flags); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateBuffer( + VkDevice device, + const VkBufferCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkBuffer* pBuffer); + +VKAPI_ATTR void VKAPI_CALL vkDestroyBuffer( + VkDevice device, + VkBuffer buffer, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateBufferView( + VkDevice device, + const VkBufferViewCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkBufferView* pView); + +VKAPI_ATTR void VKAPI_CALL vkDestroyBufferView( + VkDevice device, + VkBufferView bufferView, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateImage( + VkDevice device, + const VkImageCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkImage* pImage); + +VKAPI_ATTR void VKAPI_CALL vkDestroyImage( + VkDevice device, + VkImage image, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR void VKAPI_CALL vkGetImageSubresourceLayout( + VkDevice device, + VkImage image, + const VkImageSubresource* pSubresource, + VkSubresourceLayout* pLayout); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateImageView( + VkDevice device, + const VkImageViewCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkImageView* pView); + +VKAPI_ATTR void VKAPI_CALL vkDestroyImageView( + VkDevice device, + VkImageView imageView, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateShaderModule( + VkDevice device, + const VkShaderModuleCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkShaderModule* pShaderModule); + +VKAPI_ATTR void VKAPI_CALL vkDestroyShaderModule( + VkDevice device, + VkShaderModule shaderModule, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreatePipelineCache( + VkDevice device, + const VkPipelineCacheCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + 
VkPipelineCache* pPipelineCache); + +VKAPI_ATTR void VKAPI_CALL vkDestroyPipelineCache( + VkDevice device, + VkPipelineCache pipelineCache, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPipelineCacheData( + VkDevice device, + VkPipelineCache pipelineCache, + size_t* pDataSize, + void* pData); + +VKAPI_ATTR VkResult VKAPI_CALL vkMergePipelineCaches( + VkDevice device, + VkPipelineCache dstCache, + uint32_t srcCacheCount, + const VkPipelineCache* pSrcCaches); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateGraphicsPipelines( + VkDevice device, + VkPipelineCache pipelineCache, + uint32_t createInfoCount, + const VkGraphicsPipelineCreateInfo* pCreateInfos, + const VkAllocationCallbacks* pAllocator, + VkPipeline* pPipelines); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateComputePipelines( + VkDevice device, + VkPipelineCache pipelineCache, + uint32_t createInfoCount, + const VkComputePipelineCreateInfo* pCreateInfos, + const VkAllocationCallbacks* pAllocator, + VkPipeline* pPipelines); + +VKAPI_ATTR void VKAPI_CALL vkDestroyPipeline( + VkDevice device, + VkPipeline pipeline, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreatePipelineLayout( + VkDevice device, + const VkPipelineLayoutCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkPipelineLayout* pPipelineLayout); + +VKAPI_ATTR void VKAPI_CALL vkDestroyPipelineLayout( + VkDevice device, + VkPipelineLayout pipelineLayout, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateSampler( + VkDevice device, + const VkSamplerCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSampler* pSampler); + +VKAPI_ATTR void VKAPI_CALL vkDestroySampler( + VkDevice device, + VkSampler sampler, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateDescriptorSetLayout( + VkDevice device, + const VkDescriptorSetLayoutCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkDescriptorSetLayout* pSetLayout); + +VKAPI_ATTR void VKAPI_CALL vkDestroyDescriptorSetLayout( + VkDevice device, + VkDescriptorSetLayout descriptorSetLayout, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateDescriptorPool( + VkDevice device, + const VkDescriptorPoolCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkDescriptorPool* pDescriptorPool); + +VKAPI_ATTR void VKAPI_CALL vkDestroyDescriptorPool( + VkDevice device, + VkDescriptorPool descriptorPool, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkResetDescriptorPool( + VkDevice device, + VkDescriptorPool descriptorPool, + VkDescriptorPoolResetFlags flags); + +VKAPI_ATTR VkResult VKAPI_CALL vkAllocateDescriptorSets( + VkDevice device, + const VkDescriptorSetAllocateInfo* pAllocateInfo, + VkDescriptorSet* pDescriptorSets); + +VKAPI_ATTR VkResult VKAPI_CALL vkFreeDescriptorSets( + VkDevice device, + VkDescriptorPool descriptorPool, + uint32_t descriptorSetCount, + const VkDescriptorSet* pDescriptorSets); + +VKAPI_ATTR void VKAPI_CALL vkUpdateDescriptorSets( + VkDevice device, + uint32_t descriptorWriteCount, + const VkWriteDescriptorSet* pDescriptorWrites, + uint32_t descriptorCopyCount, + const VkCopyDescriptorSet* pDescriptorCopies); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateFramebuffer( + VkDevice device, + const VkFramebufferCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkFramebuffer* pFramebuffer); + +VKAPI_ATTR void VKAPI_CALL 
vkDestroyFramebuffer( + VkDevice device, + VkFramebuffer framebuffer, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateRenderPass( + VkDevice device, + const VkRenderPassCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkRenderPass* pRenderPass); + +VKAPI_ATTR void VKAPI_CALL vkDestroyRenderPass( + VkDevice device, + VkRenderPass renderPass, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR void VKAPI_CALL vkGetRenderAreaGranularity( + VkDevice device, + VkRenderPass renderPass, + VkExtent2D* pGranularity); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateCommandPool( + VkDevice device, + const VkCommandPoolCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkCommandPool* pCommandPool); + +VKAPI_ATTR void VKAPI_CALL vkDestroyCommandPool( + VkDevice device, + VkCommandPool commandPool, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkResetCommandPool( + VkDevice device, + VkCommandPool commandPool, + VkCommandPoolResetFlags flags); + +VKAPI_ATTR VkResult VKAPI_CALL vkAllocateCommandBuffers( + VkDevice device, + const VkCommandBufferAllocateInfo* pAllocateInfo, + VkCommandBuffer* pCommandBuffers); + +VKAPI_ATTR void VKAPI_CALL vkFreeCommandBuffers( + VkDevice device, + VkCommandPool commandPool, + uint32_t commandBufferCount, + const VkCommandBuffer* pCommandBuffers); + +VKAPI_ATTR VkResult VKAPI_CALL vkBeginCommandBuffer( + VkCommandBuffer commandBuffer, + const VkCommandBufferBeginInfo* pBeginInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkEndCommandBuffer( + VkCommandBuffer commandBuffer); + +VKAPI_ATTR VkResult VKAPI_CALL vkResetCommandBuffer( + VkCommandBuffer commandBuffer, + VkCommandBufferResetFlags flags); + +VKAPI_ATTR void VKAPI_CALL vkCmdBindPipeline( + VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipeline pipeline); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetViewport( + VkCommandBuffer commandBuffer, + uint32_t firstViewport, + uint32_t viewportCount, + const VkViewport* pViewports); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetScissor( + VkCommandBuffer commandBuffer, + uint32_t firstScissor, + uint32_t scissorCount, + const VkRect2D* pScissors); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetLineWidth( + VkCommandBuffer commandBuffer, + float lineWidth); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthBias( + VkCommandBuffer commandBuffer, + float depthBiasConstantFactor, + float depthBiasClamp, + float depthBiasSlopeFactor); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetBlendConstants( + VkCommandBuffer commandBuffer, + const float blendConstants[4]); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthBounds( + VkCommandBuffer commandBuffer, + float minDepthBounds, + float maxDepthBounds); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilCompareMask( + VkCommandBuffer commandBuffer, + VkStencilFaceFlags faceMask, + uint32_t compareMask); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilWriteMask( + VkCommandBuffer commandBuffer, + VkStencilFaceFlags faceMask, + uint32_t writeMask); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilReference( + VkCommandBuffer commandBuffer, + VkStencilFaceFlags faceMask, + uint32_t reference); + +VKAPI_ATTR void VKAPI_CALL vkCmdBindDescriptorSets( + VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipelineLayout layout, + uint32_t firstSet, + uint32_t descriptorSetCount, + const VkDescriptorSet* pDescriptorSets, + uint32_t dynamicOffsetCount, + const uint32_t* pDynamicOffsets); + +VKAPI_ATTR void VKAPI_CALL vkCmdBindIndexBuffer( + 
VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkIndexType indexType); + +VKAPI_ATTR void VKAPI_CALL vkCmdBindVertexBuffers( + VkCommandBuffer commandBuffer, + uint32_t firstBinding, + uint32_t bindingCount, + const VkBuffer* pBuffers, + const VkDeviceSize* pOffsets); + +VKAPI_ATTR void VKAPI_CALL vkCmdDraw( + VkCommandBuffer commandBuffer, + uint32_t vertexCount, + uint32_t instanceCount, + uint32_t firstVertex, + uint32_t firstInstance); + +VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexed( + VkCommandBuffer commandBuffer, + uint32_t indexCount, + uint32_t instanceCount, + uint32_t firstIndex, + int32_t vertexOffset, + uint32_t firstInstance); + +VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndirect( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + uint32_t drawCount, + uint32_t stride); + +VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexedIndirect( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + uint32_t drawCount, + uint32_t stride); + +VKAPI_ATTR void VKAPI_CALL vkCmdDispatch( + VkCommandBuffer commandBuffer, + uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ); + +VKAPI_ATTR void VKAPI_CALL vkCmdDispatchIndirect( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyBuffer( + VkCommandBuffer commandBuffer, + VkBuffer srcBuffer, + VkBuffer dstBuffer, + uint32_t regionCount, + const VkBufferCopy* pRegions); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyImage( + VkCommandBuffer commandBuffer, + VkImage srcImage, + VkImageLayout srcImageLayout, + VkImage dstImage, + VkImageLayout dstImageLayout, + uint32_t regionCount, + const VkImageCopy* pRegions); + +VKAPI_ATTR void VKAPI_CALL vkCmdBlitImage( + VkCommandBuffer commandBuffer, + VkImage srcImage, + VkImageLayout srcImageLayout, + VkImage dstImage, + VkImageLayout dstImageLayout, + uint32_t regionCount, + const VkImageBlit* pRegions, + VkFilter filter); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyBufferToImage( + VkCommandBuffer commandBuffer, + VkBuffer srcBuffer, + VkImage dstImage, + VkImageLayout dstImageLayout, + uint32_t regionCount, + const VkBufferImageCopy* pRegions); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyImageToBuffer( + VkCommandBuffer commandBuffer, + VkImage srcImage, + VkImageLayout srcImageLayout, + VkBuffer dstBuffer, + uint32_t regionCount, + const VkBufferImageCopy* pRegions); + +VKAPI_ATTR void VKAPI_CALL vkCmdUpdateBuffer( + VkCommandBuffer commandBuffer, + VkBuffer dstBuffer, + VkDeviceSize dstOffset, + VkDeviceSize dataSize, + const void* pData); + +VKAPI_ATTR void VKAPI_CALL vkCmdFillBuffer( + VkCommandBuffer commandBuffer, + VkBuffer dstBuffer, + VkDeviceSize dstOffset, + VkDeviceSize size, + uint32_t data); + +VKAPI_ATTR void VKAPI_CALL vkCmdClearColorImage( + VkCommandBuffer commandBuffer, + VkImage image, + VkImageLayout imageLayout, + const VkClearColorValue* pColor, + uint32_t rangeCount, + const VkImageSubresourceRange* pRanges); + +VKAPI_ATTR void VKAPI_CALL vkCmdClearDepthStencilImage( + VkCommandBuffer commandBuffer, + VkImage image, + VkImageLayout imageLayout, + const VkClearDepthStencilValue* pDepthStencil, + uint32_t rangeCount, + const VkImageSubresourceRange* pRanges); + +VKAPI_ATTR void VKAPI_CALL vkCmdClearAttachments( + VkCommandBuffer commandBuffer, + uint32_t attachmentCount, + const VkClearAttachment* pAttachments, + uint32_t rectCount, + const VkClearRect* pRects); + +VKAPI_ATTR void VKAPI_CALL vkCmdResolveImage( + VkCommandBuffer commandBuffer, + VkImage 
srcImage, + VkImageLayout srcImageLayout, + VkImage dstImage, + VkImageLayout dstImageLayout, + uint32_t regionCount, + const VkImageResolve* pRegions); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetEvent( + VkCommandBuffer commandBuffer, + VkEvent event, + VkPipelineStageFlags stageMask); + +VKAPI_ATTR void VKAPI_CALL vkCmdResetEvent( + VkCommandBuffer commandBuffer, + VkEvent event, + VkPipelineStageFlags stageMask); + +VKAPI_ATTR void VKAPI_CALL vkCmdWaitEvents( + VkCommandBuffer commandBuffer, + uint32_t eventCount, + const VkEvent* pEvents, + VkPipelineStageFlags srcStageMask, + VkPipelineStageFlags dstStageMask, + uint32_t memoryBarrierCount, + const VkMemoryBarrier* pMemoryBarriers, + uint32_t bufferMemoryBarrierCount, + const VkBufferMemoryBarrier* pBufferMemoryBarriers, + uint32_t imageMemoryBarrierCount, + const VkImageMemoryBarrier* pImageMemoryBarriers); + +VKAPI_ATTR void VKAPI_CALL vkCmdPipelineBarrier( + VkCommandBuffer commandBuffer, + VkPipelineStageFlags srcStageMask, + VkPipelineStageFlags dstStageMask, + VkDependencyFlags dependencyFlags, + uint32_t memoryBarrierCount, + const VkMemoryBarrier* pMemoryBarriers, + uint32_t bufferMemoryBarrierCount, + const VkBufferMemoryBarrier* pBufferMemoryBarriers, + uint32_t imageMemoryBarrierCount, + const VkImageMemoryBarrier* pImageMemoryBarriers); + +VKAPI_ATTR void VKAPI_CALL vkCmdBeginQuery( + VkCommandBuffer commandBuffer, + VkQueryPool queryPool, + uint32_t query, + VkQueryControlFlags flags); + +VKAPI_ATTR void VKAPI_CALL vkCmdEndQuery( + VkCommandBuffer commandBuffer, + VkQueryPool queryPool, + uint32_t query); + +VKAPI_ATTR void VKAPI_CALL vkCmdResetQueryPool( + VkCommandBuffer commandBuffer, + VkQueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount); + +VKAPI_ATTR void VKAPI_CALL vkCmdWriteTimestamp( + VkCommandBuffer commandBuffer, + VkPipelineStageFlagBits pipelineStage, + VkQueryPool queryPool, + uint32_t query); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyQueryPoolResults( + VkCommandBuffer commandBuffer, + VkQueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + VkBuffer dstBuffer, + VkDeviceSize dstOffset, + VkDeviceSize stride, + VkQueryResultFlags flags); + +VKAPI_ATTR void VKAPI_CALL vkCmdPushConstants( + VkCommandBuffer commandBuffer, + VkPipelineLayout layout, + VkShaderStageFlags stageFlags, + uint32_t offset, + uint32_t size, + const void* pValues); + +VKAPI_ATTR void VKAPI_CALL vkCmdBeginRenderPass( + VkCommandBuffer commandBuffer, + const VkRenderPassBeginInfo* pRenderPassBegin, + VkSubpassContents contents); + +VKAPI_ATTR void VKAPI_CALL vkCmdNextSubpass( + VkCommandBuffer commandBuffer, + VkSubpassContents contents); + +VKAPI_ATTR void VKAPI_CALL vkCmdEndRenderPass( + VkCommandBuffer commandBuffer); + +VKAPI_ATTR void VKAPI_CALL vkCmdExecuteCommands( + VkCommandBuffer commandBuffer, + uint32_t commandBufferCount, + const VkCommandBuffer* pCommandBuffers); +#endif + +#define VK_VERSION_1_1 1 +// Vulkan 1.1 version number +#define VK_API_VERSION_1_1 VK_MAKE_VERSION(1, 1, 0)// Patch version should always be set to 0 + + +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSamplerYcbcrConversion) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDescriptorUpdateTemplate) + +#define VK_MAX_DEVICE_GROUP_SIZE 32 +#define VK_LUID_SIZE 8 +#define VK_QUEUE_FAMILY_EXTERNAL (~0U-1) + + +typedef enum VkPointClippingBehavior { + VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES = 0, + VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY = 1, + VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES_KHR = VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES, + 
VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY_KHR = VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY, + VK_POINT_CLIPPING_BEHAVIOR_BEGIN_RANGE = VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES, + VK_POINT_CLIPPING_BEHAVIOR_END_RANGE = VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY, + VK_POINT_CLIPPING_BEHAVIOR_RANGE_SIZE = (VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY - VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES + 1), + VK_POINT_CLIPPING_BEHAVIOR_MAX_ENUM = 0x7FFFFFFF +} VkPointClippingBehavior; + +typedef enum VkTessellationDomainOrigin { + VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT = 0, + VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT = 1, + VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT_KHR = VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT, + VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT_KHR = VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT, + VK_TESSELLATION_DOMAIN_ORIGIN_BEGIN_RANGE = VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT, + VK_TESSELLATION_DOMAIN_ORIGIN_END_RANGE = VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT, + VK_TESSELLATION_DOMAIN_ORIGIN_RANGE_SIZE = (VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT - VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT + 1), + VK_TESSELLATION_DOMAIN_ORIGIN_MAX_ENUM = 0x7FFFFFFF +} VkTessellationDomainOrigin; + +typedef enum VkSamplerYcbcrModelConversion { + VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY = 0, + VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_IDENTITY = 1, + VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709 = 2, + VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601 = 3, + VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020 = 4, + VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY_KHR = VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY, + VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_IDENTITY_KHR = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_IDENTITY, + VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709_KHR = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709, + VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601_KHR = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601, + VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020_KHR = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020, + VK_SAMPLER_YCBCR_MODEL_CONVERSION_BEGIN_RANGE = VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY, + VK_SAMPLER_YCBCR_MODEL_CONVERSION_END_RANGE = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020, + VK_SAMPLER_YCBCR_MODEL_CONVERSION_RANGE_SIZE = (VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020 - VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY + 1), + VK_SAMPLER_YCBCR_MODEL_CONVERSION_MAX_ENUM = 0x7FFFFFFF +} VkSamplerYcbcrModelConversion; + +typedef enum VkSamplerYcbcrRange { + VK_SAMPLER_YCBCR_RANGE_ITU_FULL = 0, + VK_SAMPLER_YCBCR_RANGE_ITU_NARROW = 1, + VK_SAMPLER_YCBCR_RANGE_ITU_FULL_KHR = VK_SAMPLER_YCBCR_RANGE_ITU_FULL, + VK_SAMPLER_YCBCR_RANGE_ITU_NARROW_KHR = VK_SAMPLER_YCBCR_RANGE_ITU_NARROW, + VK_SAMPLER_YCBCR_RANGE_BEGIN_RANGE = VK_SAMPLER_YCBCR_RANGE_ITU_FULL, + VK_SAMPLER_YCBCR_RANGE_END_RANGE = VK_SAMPLER_YCBCR_RANGE_ITU_NARROW, + VK_SAMPLER_YCBCR_RANGE_RANGE_SIZE = (VK_SAMPLER_YCBCR_RANGE_ITU_NARROW - VK_SAMPLER_YCBCR_RANGE_ITU_FULL + 1), + VK_SAMPLER_YCBCR_RANGE_MAX_ENUM = 0x7FFFFFFF +} VkSamplerYcbcrRange; + +typedef enum VkChromaLocation { + VK_CHROMA_LOCATION_COSITED_EVEN = 0, + VK_CHROMA_LOCATION_MIDPOINT = 1, + VK_CHROMA_LOCATION_COSITED_EVEN_KHR = VK_CHROMA_LOCATION_COSITED_EVEN, + VK_CHROMA_LOCATION_MIDPOINT_KHR = VK_CHROMA_LOCATION_MIDPOINT, + VK_CHROMA_LOCATION_BEGIN_RANGE = VK_CHROMA_LOCATION_COSITED_EVEN, + VK_CHROMA_LOCATION_END_RANGE = VK_CHROMA_LOCATION_MIDPOINT, + VK_CHROMA_LOCATION_RANGE_SIZE = (VK_CHROMA_LOCATION_MIDPOINT - VK_CHROMA_LOCATION_COSITED_EVEN + 1), + 
VK_CHROMA_LOCATION_MAX_ENUM = 0x7FFFFFFF +} VkChromaLocation; + +typedef enum VkDescriptorUpdateTemplateType { + VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET = 0, + VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR = 1, + VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET_KHR = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET, + VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_BEGIN_RANGE = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET, + VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_END_RANGE = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET, + VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_RANGE_SIZE = (VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET - VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET + 1), + VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_MAX_ENUM = 0x7FFFFFFF +} VkDescriptorUpdateTemplateType; + + +typedef enum VkSubgroupFeatureFlagBits { + VK_SUBGROUP_FEATURE_BASIC_BIT = 0x00000001, + VK_SUBGROUP_FEATURE_VOTE_BIT = 0x00000002, + VK_SUBGROUP_FEATURE_ARITHMETIC_BIT = 0x00000004, + VK_SUBGROUP_FEATURE_BALLOT_BIT = 0x00000008, + VK_SUBGROUP_FEATURE_SHUFFLE_BIT = 0x00000010, + VK_SUBGROUP_FEATURE_SHUFFLE_RELATIVE_BIT = 0x00000020, + VK_SUBGROUP_FEATURE_CLUSTERED_BIT = 0x00000040, + VK_SUBGROUP_FEATURE_QUAD_BIT = 0x00000080, + VK_SUBGROUP_FEATURE_PARTITIONED_BIT_NV = 0x00000100, + VK_SUBGROUP_FEATURE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkSubgroupFeatureFlagBits; +typedef VkFlags VkSubgroupFeatureFlags; + +typedef enum VkPeerMemoryFeatureFlagBits { + VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT = 0x00000001, + VK_PEER_MEMORY_FEATURE_COPY_DST_BIT = 0x00000002, + VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT = 0x00000004, + VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT = 0x00000008, + VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT_KHR = VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT, + VK_PEER_MEMORY_FEATURE_COPY_DST_BIT_KHR = VK_PEER_MEMORY_FEATURE_COPY_DST_BIT, + VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT_KHR = VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT, + VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT_KHR = VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT, + VK_PEER_MEMORY_FEATURE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkPeerMemoryFeatureFlagBits; +typedef VkFlags VkPeerMemoryFeatureFlags; + +typedef enum VkMemoryAllocateFlagBits { + VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT = 0x00000001, + VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT_KHR = VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT, + VK_MEMORY_ALLOCATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkMemoryAllocateFlagBits; +typedef VkFlags VkMemoryAllocateFlags; +typedef VkFlags VkCommandPoolTrimFlags; +typedef VkFlags VkDescriptorUpdateTemplateCreateFlags; + +typedef enum VkExternalMemoryHandleTypeFlagBits { + VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT = 0x00000001, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT = 0x00000002, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT = 0x00000004, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT = 0x00000008, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT = 0x00000010, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT = 0x00000020, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT = 0x00000040, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT = 0x00000200, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID = 0x00000400, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT = 0x00000080, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_MAPPED_FOREIGN_MEMORY_BIT_EXT = 0x00000100, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT_KHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT, + 
VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_KHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT_KHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT_KHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT_KHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT_KHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkExternalMemoryHandleTypeFlagBits; +typedef VkFlags VkExternalMemoryHandleTypeFlags; + +typedef enum VkExternalMemoryFeatureFlagBits { + VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT = 0x00000001, + VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT = 0x00000002, + VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT = 0x00000004, + VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT_KHR = VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT, + VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_KHR = VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT, + VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_KHR = VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT, + VK_EXTERNAL_MEMORY_FEATURE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkExternalMemoryFeatureFlagBits; +typedef VkFlags VkExternalMemoryFeatureFlags; + +typedef enum VkExternalFenceHandleTypeFlagBits { + VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT = 0x00000001, + VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT = 0x00000002, + VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT = 0x00000004, + VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT = 0x00000008, + VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT_KHR = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT, + VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT, + VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_KHR = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT, + VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT_KHR = VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT, + VK_EXTERNAL_FENCE_HANDLE_TYPE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkExternalFenceHandleTypeFlagBits; +typedef VkFlags VkExternalFenceHandleTypeFlags; + +typedef enum VkExternalFenceFeatureFlagBits { + VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT = 0x00000001, + VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT = 0x00000002, + VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT_KHR = VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT, + VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT_KHR = VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT, + VK_EXTERNAL_FENCE_FEATURE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkExternalFenceFeatureFlagBits; +typedef VkFlags VkExternalFenceFeatureFlags; + +typedef enum VkFenceImportFlagBits { + VK_FENCE_IMPORT_TEMPORARY_BIT = 0x00000001, + VK_FENCE_IMPORT_TEMPORARY_BIT_KHR = VK_FENCE_IMPORT_TEMPORARY_BIT, + VK_FENCE_IMPORT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkFenceImportFlagBits; +typedef VkFlags VkFenceImportFlags; + +typedef enum VkSemaphoreImportFlagBits { + VK_SEMAPHORE_IMPORT_TEMPORARY_BIT = 0x00000001, + VK_SEMAPHORE_IMPORT_TEMPORARY_BIT_KHR = VK_SEMAPHORE_IMPORT_TEMPORARY_BIT, + VK_SEMAPHORE_IMPORT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkSemaphoreImportFlagBits; +typedef VkFlags VkSemaphoreImportFlags; + +typedef enum VkExternalSemaphoreHandleTypeFlagBits { + VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT = 0x00000001, + VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT = 0x00000002, + VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT = 0x00000004, + 
VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT = 0x00000008, + VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT = 0x00000010, + VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT_KHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT, + VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT, + VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_KHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT, + VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT_KHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT, + VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT_KHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT, + VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkExternalSemaphoreHandleTypeFlagBits; +typedef VkFlags VkExternalSemaphoreHandleTypeFlags; + +typedef enum VkExternalSemaphoreFeatureFlagBits { + VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT = 0x00000001, + VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT = 0x00000002, + VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT_KHR = VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT, + VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT_KHR = VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT, + VK_EXTERNAL_SEMAPHORE_FEATURE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkExternalSemaphoreFeatureFlagBits; +typedef VkFlags VkExternalSemaphoreFeatureFlags; + +typedef struct VkPhysicalDeviceSubgroupProperties { + VkStructureType sType; + void* pNext; + uint32_t subgroupSize; + VkShaderStageFlags supportedStages; + VkSubgroupFeatureFlags supportedOperations; + VkBool32 quadOperationsInAllStages; +} VkPhysicalDeviceSubgroupProperties; + +typedef struct VkBindBufferMemoryInfo { + VkStructureType sType; + const void* pNext; + VkBuffer buffer; + VkDeviceMemory memory; + VkDeviceSize memoryOffset; +} VkBindBufferMemoryInfo; + +typedef struct VkBindImageMemoryInfo { + VkStructureType sType; + const void* pNext; + VkImage image; + VkDeviceMemory memory; + VkDeviceSize memoryOffset; +} VkBindImageMemoryInfo; + +typedef struct VkPhysicalDevice16BitStorageFeatures { + VkStructureType sType; + void* pNext; + VkBool32 storageBuffer16BitAccess; + VkBool32 uniformAndStorageBuffer16BitAccess; + VkBool32 storagePushConstant16; + VkBool32 storageInputOutput16; +} VkPhysicalDevice16BitStorageFeatures; + +typedef struct VkMemoryDedicatedRequirements { + VkStructureType sType; + void* pNext; + VkBool32 prefersDedicatedAllocation; + VkBool32 requiresDedicatedAllocation; +} VkMemoryDedicatedRequirements; + +typedef struct VkMemoryDedicatedAllocateInfo { + VkStructureType sType; + const void* pNext; + VkImage image; + VkBuffer buffer; +} VkMemoryDedicatedAllocateInfo; + +typedef struct VkMemoryAllocateFlagsInfo { + VkStructureType sType; + const void* pNext; + VkMemoryAllocateFlags flags; + uint32_t deviceMask; +} VkMemoryAllocateFlagsInfo; + +typedef struct VkDeviceGroupRenderPassBeginInfo { + VkStructureType sType; + const void* pNext; + uint32_t deviceMask; + uint32_t deviceRenderAreaCount; + const VkRect2D* pDeviceRenderAreas; +} VkDeviceGroupRenderPassBeginInfo; + +typedef struct VkDeviceGroupCommandBufferBeginInfo { + VkStructureType sType; + const void* pNext; + uint32_t deviceMask; +} VkDeviceGroupCommandBufferBeginInfo; + +typedef struct VkDeviceGroupSubmitInfo { + VkStructureType sType; + const void* pNext; + uint32_t waitSemaphoreCount; + const uint32_t* pWaitSemaphoreDeviceIndices; + uint32_t commandBufferCount; + const uint32_t* pCommandBufferDeviceMasks; + uint32_t signalSemaphoreCount; + const uint32_t* 
pSignalSemaphoreDeviceIndices; +} VkDeviceGroupSubmitInfo; + +typedef struct VkDeviceGroupBindSparseInfo { + VkStructureType sType; + const void* pNext; + uint32_t resourceDeviceIndex; + uint32_t memoryDeviceIndex; +} VkDeviceGroupBindSparseInfo; + +typedef struct VkBindBufferMemoryDeviceGroupInfo { + VkStructureType sType; + const void* pNext; + uint32_t deviceIndexCount; + const uint32_t* pDeviceIndices; +} VkBindBufferMemoryDeviceGroupInfo; + +typedef struct VkBindImageMemoryDeviceGroupInfo { + VkStructureType sType; + const void* pNext; + uint32_t deviceIndexCount; + const uint32_t* pDeviceIndices; + uint32_t splitInstanceBindRegionCount; + const VkRect2D* pSplitInstanceBindRegions; +} VkBindImageMemoryDeviceGroupInfo; + +typedef struct VkPhysicalDeviceGroupProperties { + VkStructureType sType; + void* pNext; + uint32_t physicalDeviceCount; + VkPhysicalDevice physicalDevices[VK_MAX_DEVICE_GROUP_SIZE]; + VkBool32 subsetAllocation; +} VkPhysicalDeviceGroupProperties; + +typedef struct VkDeviceGroupDeviceCreateInfo { + VkStructureType sType; + const void* pNext; + uint32_t physicalDeviceCount; + const VkPhysicalDevice* pPhysicalDevices; +} VkDeviceGroupDeviceCreateInfo; + +typedef struct VkBufferMemoryRequirementsInfo2 { + VkStructureType sType; + const void* pNext; + VkBuffer buffer; +} VkBufferMemoryRequirementsInfo2; + +typedef struct VkImageMemoryRequirementsInfo2 { + VkStructureType sType; + const void* pNext; + VkImage image; +} VkImageMemoryRequirementsInfo2; + +typedef struct VkImageSparseMemoryRequirementsInfo2 { + VkStructureType sType; + const void* pNext; + VkImage image; +} VkImageSparseMemoryRequirementsInfo2; + +typedef struct VkMemoryRequirements2 { + VkStructureType sType; + void* pNext; + VkMemoryRequirements memoryRequirements; +} VkMemoryRequirements2; + +typedef VkMemoryRequirements2 VkMemoryRequirements2KHR; + +typedef struct VkSparseImageMemoryRequirements2 { + VkStructureType sType; + void* pNext; + VkSparseImageMemoryRequirements memoryRequirements; +} VkSparseImageMemoryRequirements2; + +typedef struct VkPhysicalDeviceFeatures2 { + VkStructureType sType; + void* pNext; + VkPhysicalDeviceFeatures features; +} VkPhysicalDeviceFeatures2; + +typedef struct VkPhysicalDeviceProperties2 { + VkStructureType sType; + void* pNext; + VkPhysicalDeviceProperties properties; +} VkPhysicalDeviceProperties2; + +typedef struct VkFormatProperties2 { + VkStructureType sType; + void* pNext; + VkFormatProperties formatProperties; +} VkFormatProperties2; + +typedef struct VkImageFormatProperties2 { + VkStructureType sType; + void* pNext; + VkImageFormatProperties imageFormatProperties; +} VkImageFormatProperties2; + +typedef struct VkPhysicalDeviceImageFormatInfo2 { + VkStructureType sType; + const void* pNext; + VkFormat format; + VkImageType type; + VkImageTiling tiling; + VkImageUsageFlags usage; + VkImageCreateFlags flags; +} VkPhysicalDeviceImageFormatInfo2; + +typedef struct VkQueueFamilyProperties2 { + VkStructureType sType; + void* pNext; + VkQueueFamilyProperties queueFamilyProperties; +} VkQueueFamilyProperties2; + +typedef struct VkPhysicalDeviceMemoryProperties2 { + VkStructureType sType; + void* pNext; + VkPhysicalDeviceMemoryProperties memoryProperties; +} VkPhysicalDeviceMemoryProperties2; + +typedef struct VkSparseImageFormatProperties2 { + VkStructureType sType; + void* pNext; + VkSparseImageFormatProperties properties; +} VkSparseImageFormatProperties2; + +typedef struct VkPhysicalDeviceSparseImageFormatInfo2 { + VkStructureType sType; + const void* pNext; + VkFormat 
format; + VkImageType type; + VkSampleCountFlagBits samples; + VkImageUsageFlags usage; + VkImageTiling tiling; +} VkPhysicalDeviceSparseImageFormatInfo2; + +typedef struct VkPhysicalDevicePointClippingProperties { + VkStructureType sType; + void* pNext; + VkPointClippingBehavior pointClippingBehavior; +} VkPhysicalDevicePointClippingProperties; + +typedef struct VkInputAttachmentAspectReference { + uint32_t subpass; + uint32_t inputAttachmentIndex; + VkImageAspectFlags aspectMask; +} VkInputAttachmentAspectReference; + +typedef struct VkRenderPassInputAttachmentAspectCreateInfo { + VkStructureType sType; + const void* pNext; + uint32_t aspectReferenceCount; + const VkInputAttachmentAspectReference* pAspectReferences; +} VkRenderPassInputAttachmentAspectCreateInfo; + +typedef struct VkImageViewUsageCreateInfo { + VkStructureType sType; + const void* pNext; + VkImageUsageFlags usage; +} VkImageViewUsageCreateInfo; + +typedef struct VkPipelineTessellationDomainOriginStateCreateInfo { + VkStructureType sType; + const void* pNext; + VkTessellationDomainOrigin domainOrigin; +} VkPipelineTessellationDomainOriginStateCreateInfo; + +typedef struct VkRenderPassMultiviewCreateInfo { + VkStructureType sType; + const void* pNext; + uint32_t subpassCount; + const uint32_t* pViewMasks; + uint32_t dependencyCount; + const int32_t* pViewOffsets; + uint32_t correlationMaskCount; + const uint32_t* pCorrelationMasks; +} VkRenderPassMultiviewCreateInfo; + +typedef struct VkPhysicalDeviceMultiviewFeatures { + VkStructureType sType; + void* pNext; + VkBool32 multiview; + VkBool32 multiviewGeometryShader; + VkBool32 multiviewTessellationShader; +} VkPhysicalDeviceMultiviewFeatures; + +typedef struct VkPhysicalDeviceMultiviewProperties { + VkStructureType sType; + void* pNext; + uint32_t maxMultiviewViewCount; + uint32_t maxMultiviewInstanceIndex; +} VkPhysicalDeviceMultiviewProperties; + +typedef struct VkPhysicalDeviceVariablePointerFeatures { + VkStructureType sType; + void* pNext; + VkBool32 variablePointersStorageBuffer; + VkBool32 variablePointers; +} VkPhysicalDeviceVariablePointerFeatures; + +typedef struct VkPhysicalDeviceProtectedMemoryFeatures { + VkStructureType sType; + void* pNext; + VkBool32 protectedMemory; +} VkPhysicalDeviceProtectedMemoryFeatures; + +typedef struct VkPhysicalDeviceProtectedMemoryProperties { + VkStructureType sType; + void* pNext; + VkBool32 protectedNoFault; +} VkPhysicalDeviceProtectedMemoryProperties; + +typedef struct VkDeviceQueueInfo2 { + VkStructureType sType; + const void* pNext; + VkDeviceQueueCreateFlags flags; + uint32_t queueFamilyIndex; + uint32_t queueIndex; +} VkDeviceQueueInfo2; + +typedef struct VkProtectedSubmitInfo { + VkStructureType sType; + const void* pNext; + VkBool32 protectedSubmit; +} VkProtectedSubmitInfo; + +typedef struct VkSamplerYcbcrConversionCreateInfo { + VkStructureType sType; + const void* pNext; + VkFormat format; + VkSamplerYcbcrModelConversion ycbcrModel; + VkSamplerYcbcrRange ycbcrRange; + VkComponentMapping components; + VkChromaLocation xChromaOffset; + VkChromaLocation yChromaOffset; + VkFilter chromaFilter; + VkBool32 forceExplicitReconstruction; +} VkSamplerYcbcrConversionCreateInfo; + +typedef struct VkSamplerYcbcrConversionInfo { + VkStructureType sType; + const void* pNext; + VkSamplerYcbcrConversion conversion; +} VkSamplerYcbcrConversionInfo; + +typedef struct VkBindImagePlaneMemoryInfo { + VkStructureType sType; + const void* pNext; + VkImageAspectFlagBits planeAspect; +} VkBindImagePlaneMemoryInfo; + +typedef struct 
VkImagePlaneMemoryRequirementsInfo { + VkStructureType sType; + const void* pNext; + VkImageAspectFlagBits planeAspect; +} VkImagePlaneMemoryRequirementsInfo; + +typedef struct VkPhysicalDeviceSamplerYcbcrConversionFeatures { + VkStructureType sType; + void* pNext; + VkBool32 samplerYcbcrConversion; +} VkPhysicalDeviceSamplerYcbcrConversionFeatures; + +typedef struct VkSamplerYcbcrConversionImageFormatProperties { + VkStructureType sType; + void* pNext; + uint32_t combinedImageSamplerDescriptorCount; +} VkSamplerYcbcrConversionImageFormatProperties; + +typedef struct VkDescriptorUpdateTemplateEntry { + uint32_t dstBinding; + uint32_t dstArrayElement; + uint32_t descriptorCount; + VkDescriptorType descriptorType; + size_t offset; + size_t stride; +} VkDescriptorUpdateTemplateEntry; + +typedef struct VkDescriptorUpdateTemplateCreateInfo { + VkStructureType sType; + void* pNext; + VkDescriptorUpdateTemplateCreateFlags flags; + uint32_t descriptorUpdateEntryCount; + const VkDescriptorUpdateTemplateEntry* pDescriptorUpdateEntries; + VkDescriptorUpdateTemplateType templateType; + VkDescriptorSetLayout descriptorSetLayout; + VkPipelineBindPoint pipelineBindPoint; + VkPipelineLayout pipelineLayout; + uint32_t set; +} VkDescriptorUpdateTemplateCreateInfo; + +typedef struct VkExternalMemoryProperties { + VkExternalMemoryFeatureFlags externalMemoryFeatures; + VkExternalMemoryHandleTypeFlags exportFromImportedHandleTypes; + VkExternalMemoryHandleTypeFlags compatibleHandleTypes; +} VkExternalMemoryProperties; + +typedef struct VkPhysicalDeviceExternalImageFormatInfo { + VkStructureType sType; + const void* pNext; + VkExternalMemoryHandleTypeFlagBits handleType; +} VkPhysicalDeviceExternalImageFormatInfo; + +typedef struct VkExternalImageFormatProperties { + VkStructureType sType; + void* pNext; + VkExternalMemoryProperties externalMemoryProperties; +} VkExternalImageFormatProperties; + +typedef struct VkPhysicalDeviceExternalBufferInfo { + VkStructureType sType; + const void* pNext; + VkBufferCreateFlags flags; + VkBufferUsageFlags usage; + VkExternalMemoryHandleTypeFlagBits handleType; +} VkPhysicalDeviceExternalBufferInfo; + +typedef struct VkExternalBufferProperties { + VkStructureType sType; + void* pNext; + VkExternalMemoryProperties externalMemoryProperties; +} VkExternalBufferProperties; + +typedef struct VkPhysicalDeviceIDProperties { + VkStructureType sType; + void* pNext; + uint8_t deviceUUID[VK_UUID_SIZE]; + uint8_t driverUUID[VK_UUID_SIZE]; + uint8_t deviceLUID[VK_LUID_SIZE]; + uint32_t deviceNodeMask; + VkBool32 deviceLUIDValid; +} VkPhysicalDeviceIDProperties; + +typedef struct VkExternalMemoryImageCreateInfo { + VkStructureType sType; + const void* pNext; + VkExternalMemoryHandleTypeFlags handleTypes; +} VkExternalMemoryImageCreateInfo; + +typedef struct VkExternalMemoryBufferCreateInfo { + VkStructureType sType; + const void* pNext; + VkExternalMemoryHandleTypeFlags handleTypes; +} VkExternalMemoryBufferCreateInfo; + +typedef struct VkExportMemoryAllocateInfo { + VkStructureType sType; + const void* pNext; + VkExternalMemoryHandleTypeFlags handleTypes; +} VkExportMemoryAllocateInfo; + +typedef struct VkPhysicalDeviceExternalFenceInfo { + VkStructureType sType; + const void* pNext; + VkExternalFenceHandleTypeFlagBits handleType; +} VkPhysicalDeviceExternalFenceInfo; + +typedef struct VkExternalFenceProperties { + VkStructureType sType; + void* pNext; + VkExternalFenceHandleTypeFlags exportFromImportedHandleTypes; + VkExternalFenceHandleTypeFlags compatibleHandleTypes; + 
VkExternalFenceFeatureFlags externalFenceFeatures; +} VkExternalFenceProperties; + +typedef struct VkExportFenceCreateInfo { + VkStructureType sType; + const void* pNext; + VkExternalFenceHandleTypeFlags handleTypes; +} VkExportFenceCreateInfo; + +typedef struct VkExportSemaphoreCreateInfo { + VkStructureType sType; + const void* pNext; + VkExternalSemaphoreHandleTypeFlags handleTypes; +} VkExportSemaphoreCreateInfo; + +typedef struct VkPhysicalDeviceExternalSemaphoreInfo { + VkStructureType sType; + const void* pNext; + VkExternalSemaphoreHandleTypeFlagBits handleType; +} VkPhysicalDeviceExternalSemaphoreInfo; + +typedef struct VkExternalSemaphoreProperties { + VkStructureType sType; + void* pNext; + VkExternalSemaphoreHandleTypeFlags exportFromImportedHandleTypes; + VkExternalSemaphoreHandleTypeFlags compatibleHandleTypes; + VkExternalSemaphoreFeatureFlags externalSemaphoreFeatures; +} VkExternalSemaphoreProperties; + +typedef struct VkPhysicalDeviceMaintenance3Properties { + VkStructureType sType; + void* pNext; + uint32_t maxPerSetDescriptors; + VkDeviceSize maxMemoryAllocationSize; +} VkPhysicalDeviceMaintenance3Properties; + +typedef struct VkDescriptorSetLayoutSupport { + VkStructureType sType; + void* pNext; + VkBool32 supported; +} VkDescriptorSetLayoutSupport; + +typedef struct VkPhysicalDeviceShaderDrawParameterFeatures { + VkStructureType sType; + void* pNext; + VkBool32 shaderDrawParameters; +} VkPhysicalDeviceShaderDrawParameterFeatures; + + +typedef VkResult (VKAPI_PTR *PFN_vkEnumerateInstanceVersion)(uint32_t* pApiVersion); +typedef VkResult (VKAPI_PTR *PFN_vkBindBufferMemory2)(VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo* pBindInfos); +typedef VkResult (VKAPI_PTR *PFN_vkBindImageMemory2)(VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo* pBindInfos); +typedef void (VKAPI_PTR *PFN_vkGetDeviceGroupPeerMemoryFeatures)(VkDevice device, uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VkPeerMemoryFeatureFlags* pPeerMemoryFeatures); +typedef void (VKAPI_PTR *PFN_vkCmdSetDeviceMask)(VkCommandBuffer commandBuffer, uint32_t deviceMask); +typedef void (VKAPI_PTR *PFN_vkCmdDispatchBase)(VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ); +typedef VkResult (VKAPI_PTR *PFN_vkEnumeratePhysicalDeviceGroups)(VkInstance instance, uint32_t* pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties); +typedef void (VKAPI_PTR *PFN_vkGetImageMemoryRequirements2)(VkDevice device, const VkImageMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements); +typedef void (VKAPI_PTR *PFN_vkGetBufferMemoryRequirements2)(VkDevice device, const VkBufferMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements); +typedef void (VKAPI_PTR *PFN_vkGetImageSparseMemoryRequirements2)(VkDevice device, const VkImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2* pSparseMemoryRequirements); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceFeatures2)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2* pFeatures); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceProperties2)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2* pProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceFormatProperties2)(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties2* 
pFormatProperties); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceImageFormatProperties2)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo, VkImageFormatProperties2* pImageFormatProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceQueueFamilyProperties2)(VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties2* pQueueFamilyProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceMemoryProperties2)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2* pMemoryProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceSparseImageFormatProperties2)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VkSparseImageFormatProperties2* pProperties); +typedef void (VKAPI_PTR *PFN_vkTrimCommandPool)(VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags); +typedef void (VKAPI_PTR *PFN_vkGetDeviceQueue2)(VkDevice device, const VkDeviceQueueInfo2* pQueueInfo, VkQueue* pQueue); +typedef VkResult (VKAPI_PTR *PFN_vkCreateSamplerYcbcrConversion)(VkDevice device, const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSamplerYcbcrConversion* pYcbcrConversion); +typedef void (VKAPI_PTR *PFN_vkDestroySamplerYcbcrConversion)(VkDevice device, VkSamplerYcbcrConversion ycbcrConversion, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkCreateDescriptorUpdateTemplate)(VkDevice device, const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate); +typedef void (VKAPI_PTR *PFN_vkDestroyDescriptorUpdateTemplate)(VkDevice device, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkUpdateDescriptorSetWithTemplate)(VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalBufferProperties)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VkExternalBufferProperties* pExternalBufferProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalFenceProperties)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VkExternalFenceProperties* pExternalFenceProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalSemaphoreProperties)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VkExternalSemaphoreProperties* pExternalSemaphoreProperties); +typedef void (VKAPI_PTR *PFN_vkGetDescriptorSetLayoutSupport)(VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, VkDescriptorSetLayoutSupport* pSupport); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceVersion( + uint32_t* pApiVersion); + +VKAPI_ATTR VkResult VKAPI_CALL vkBindBufferMemory2( + VkDevice device, + uint32_t bindInfoCount, + const VkBindBufferMemoryInfo* pBindInfos); + +VKAPI_ATTR VkResult VKAPI_CALL vkBindImageMemory2( + VkDevice device, + uint32_t bindInfoCount, + const VkBindImageMemoryInfo* pBindInfos); + +VKAPI_ATTR void VKAPI_CALL vkGetDeviceGroupPeerMemoryFeatures( + VkDevice device, + uint32_t heapIndex, + uint32_t localDeviceIndex, + uint32_t remoteDeviceIndex, + VkPeerMemoryFeatureFlags* pPeerMemoryFeatures); + 
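/*
 * Illustrative usage sketch added for this document; it is not part of the
 * Khronos header above. It shows one common way to use the Vulkan 1.1
 * entry points declared here: chaining an extension feature struct into
 * vkGetPhysicalDeviceFeatures2. It assumes vulkan.h is included, that the
 * instance was created for API version 1.1 or later, and the helper name
 * query_ycbcr_support_example is hypothetical.
 */
static VkBool32 query_ycbcr_support_example(VkPhysicalDevice physical_device)
{
    /* Zero-initialize both structs; only sType (and pNext) need explicit values. */
    VkPhysicalDeviceSamplerYcbcrConversionFeatures ycbcr_features =
        { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES };
    VkPhysicalDeviceFeatures2 features2 =
        { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2 };

    /* Chain the Y'CbCr-conversion feature query into the features2 call. */
    features2.pNext = &ycbcr_features;
    vkGetPhysicalDeviceFeatures2(physical_device, &features2);

    /* VK_TRUE if the implementation supports sampler Y'CbCr conversion. */
    return ycbcr_features.samplerYcbcrConversion;
}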
+VKAPI_ATTR void VKAPI_CALL vkCmdSetDeviceMask( + VkCommandBuffer commandBuffer, + uint32_t deviceMask); + +VKAPI_ATTR void VKAPI_CALL vkCmdDispatchBase( + VkCommandBuffer commandBuffer, + uint32_t baseGroupX, + uint32_t baseGroupY, + uint32_t baseGroupZ, + uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ); + +VKAPI_ATTR VkResult VKAPI_CALL vkEnumeratePhysicalDeviceGroups( + VkInstance instance, + uint32_t* pPhysicalDeviceGroupCount, + VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetImageMemoryRequirements2( + VkDevice device, + const VkImageMemoryRequirementsInfo2* pInfo, + VkMemoryRequirements2* pMemoryRequirements); + +VKAPI_ATTR void VKAPI_CALL vkGetBufferMemoryRequirements2( + VkDevice device, + const VkBufferMemoryRequirementsInfo2* pInfo, + VkMemoryRequirements2* pMemoryRequirements); + +VKAPI_ATTR void VKAPI_CALL vkGetImageSparseMemoryRequirements2( + VkDevice device, + const VkImageSparseMemoryRequirementsInfo2* pInfo, + uint32_t* pSparseMemoryRequirementCount, + VkSparseImageMemoryRequirements2* pSparseMemoryRequirements); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFeatures2( + VkPhysicalDevice physicalDevice, + VkPhysicalDeviceFeatures2* pFeatures); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceProperties2( + VkPhysicalDevice physicalDevice, + VkPhysicalDeviceProperties2* pProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFormatProperties2( + VkPhysicalDevice physicalDevice, + VkFormat format, + VkFormatProperties2* pFormatProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceImageFormatProperties2( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo, + VkImageFormatProperties2* pImageFormatProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceQueueFamilyProperties2( + VkPhysicalDevice physicalDevice, + uint32_t* pQueueFamilyPropertyCount, + VkQueueFamilyProperties2* pQueueFamilyProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceMemoryProperties2( + VkPhysicalDevice physicalDevice, + VkPhysicalDeviceMemoryProperties2* pMemoryProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceSparseImageFormatProperties2( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo, + uint32_t* pPropertyCount, + VkSparseImageFormatProperties2* pProperties); + +VKAPI_ATTR void VKAPI_CALL vkTrimCommandPool( + VkDevice device, + VkCommandPool commandPool, + VkCommandPoolTrimFlags flags); + +VKAPI_ATTR void VKAPI_CALL vkGetDeviceQueue2( + VkDevice device, + const VkDeviceQueueInfo2* pQueueInfo, + VkQueue* pQueue); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateSamplerYcbcrConversion( + VkDevice device, + const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSamplerYcbcrConversion* pYcbcrConversion); + +VKAPI_ATTR void VKAPI_CALL vkDestroySamplerYcbcrConversion( + VkDevice device, + VkSamplerYcbcrConversion ycbcrConversion, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateDescriptorUpdateTemplate( + VkDevice device, + const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate); + +VKAPI_ATTR void VKAPI_CALL vkDestroyDescriptorUpdateTemplate( + VkDevice device, + VkDescriptorUpdateTemplate descriptorUpdateTemplate, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR void VKAPI_CALL 
vkUpdateDescriptorSetWithTemplate( + VkDevice device, + VkDescriptorSet descriptorSet, + VkDescriptorUpdateTemplate descriptorUpdateTemplate, + const void* pData); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalBufferProperties( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo, + VkExternalBufferProperties* pExternalBufferProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalFenceProperties( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo, + VkExternalFenceProperties* pExternalFenceProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalSemaphoreProperties( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, + VkExternalSemaphoreProperties* pExternalSemaphoreProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetDescriptorSetLayoutSupport( + VkDevice device, + const VkDescriptorSetLayoutCreateInfo* pCreateInfo, + VkDescriptorSetLayoutSupport* pSupport); +#endif + +#define VK_KHR_surface 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSurfaceKHR) + +#define VK_KHR_SURFACE_SPEC_VERSION 25 +#define VK_KHR_SURFACE_EXTENSION_NAME "VK_KHR_surface" + + +typedef enum VkColorSpaceKHR { + VK_COLOR_SPACE_SRGB_NONLINEAR_KHR = 0, + VK_COLOR_SPACE_DISPLAY_P3_NONLINEAR_EXT = 1000104001, + VK_COLOR_SPACE_EXTENDED_SRGB_LINEAR_EXT = 1000104002, + VK_COLOR_SPACE_DCI_P3_LINEAR_EXT = 1000104003, + VK_COLOR_SPACE_DCI_P3_NONLINEAR_EXT = 1000104004, + VK_COLOR_SPACE_BT709_LINEAR_EXT = 1000104005, + VK_COLOR_SPACE_BT709_NONLINEAR_EXT = 1000104006, + VK_COLOR_SPACE_BT2020_LINEAR_EXT = 1000104007, + VK_COLOR_SPACE_HDR10_ST2084_EXT = 1000104008, + VK_COLOR_SPACE_DOLBYVISION_EXT = 1000104009, + VK_COLOR_SPACE_HDR10_HLG_EXT = 1000104010, + VK_COLOR_SPACE_ADOBERGB_LINEAR_EXT = 1000104011, + VK_COLOR_SPACE_ADOBERGB_NONLINEAR_EXT = 1000104012, + VK_COLOR_SPACE_PASS_THROUGH_EXT = 1000104013, + VK_COLOR_SPACE_EXTENDED_SRGB_NONLINEAR_EXT = 1000104014, + VK_COLORSPACE_SRGB_NONLINEAR_KHR = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR, + VK_COLOR_SPACE_BEGIN_RANGE_KHR = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR, + VK_COLOR_SPACE_END_RANGE_KHR = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR, + VK_COLOR_SPACE_RANGE_SIZE_KHR = (VK_COLOR_SPACE_SRGB_NONLINEAR_KHR - VK_COLOR_SPACE_SRGB_NONLINEAR_KHR + 1), + VK_COLOR_SPACE_MAX_ENUM_KHR = 0x7FFFFFFF +} VkColorSpaceKHR; + +typedef enum VkPresentModeKHR { + VK_PRESENT_MODE_IMMEDIATE_KHR = 0, + VK_PRESENT_MODE_MAILBOX_KHR = 1, + VK_PRESENT_MODE_FIFO_KHR = 2, + VK_PRESENT_MODE_FIFO_RELAXED_KHR = 3, + VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR = 1000111000, + VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR = 1000111001, + VK_PRESENT_MODE_BEGIN_RANGE_KHR = VK_PRESENT_MODE_IMMEDIATE_KHR, + VK_PRESENT_MODE_END_RANGE_KHR = VK_PRESENT_MODE_FIFO_RELAXED_KHR, + VK_PRESENT_MODE_RANGE_SIZE_KHR = (VK_PRESENT_MODE_FIFO_RELAXED_KHR - VK_PRESENT_MODE_IMMEDIATE_KHR + 1), + VK_PRESENT_MODE_MAX_ENUM_KHR = 0x7FFFFFFF +} VkPresentModeKHR; + + +typedef enum VkSurfaceTransformFlagBitsKHR { + VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR = 0x00000001, + VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR = 0x00000002, + VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR = 0x00000004, + VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR = 0x00000008, + VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_BIT_KHR = 0x00000010, + VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR = 0x00000020, + VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR = 0x00000040, + 
VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR = 0x00000080, + VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR = 0x00000100, + VK_SURFACE_TRANSFORM_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkSurfaceTransformFlagBitsKHR; +typedef VkFlags VkSurfaceTransformFlagsKHR; + +typedef enum VkCompositeAlphaFlagBitsKHR { + VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR = 0x00000001, + VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR = 0x00000002, + VK_COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR = 0x00000004, + VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR = 0x00000008, + VK_COMPOSITE_ALPHA_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkCompositeAlphaFlagBitsKHR; +typedef VkFlags VkCompositeAlphaFlagsKHR; + +typedef struct VkSurfaceCapabilitiesKHR { + uint32_t minImageCount; + uint32_t maxImageCount; + VkExtent2D currentExtent; + VkExtent2D minImageExtent; + VkExtent2D maxImageExtent; + uint32_t maxImageArrayLayers; + VkSurfaceTransformFlagsKHR supportedTransforms; + VkSurfaceTransformFlagBitsKHR currentTransform; + VkCompositeAlphaFlagsKHR supportedCompositeAlpha; + VkImageUsageFlags supportedUsageFlags; +} VkSurfaceCapabilitiesKHR; + +typedef struct VkSurfaceFormatKHR { + VkFormat format; + VkColorSpaceKHR colorSpace; +} VkSurfaceFormatKHR; + + +typedef void (VKAPI_PTR *PFN_vkDestroySurfaceKHR)(VkInstance instance, VkSurfaceKHR surface, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceSupportKHR)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, VkSurfaceKHR surface, VkBool32* pSupported); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR)(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilitiesKHR* pSurfaceCapabilities); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceFormatsKHR)(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pSurfaceFormatCount, VkSurfaceFormatKHR* pSurfaceFormats); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfacePresentModesKHR)(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pPresentModeCount, VkPresentModeKHR* pPresentModes); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkDestroySurfaceKHR( + VkInstance instance, + VkSurfaceKHR surface, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceSupportKHR( + VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + VkSurfaceKHR surface, + VkBool32* pSupported); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceCapabilitiesKHR( + VkPhysicalDevice physicalDevice, + VkSurfaceKHR surface, + VkSurfaceCapabilitiesKHR* pSurfaceCapabilities); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceFormatsKHR( + VkPhysicalDevice physicalDevice, + VkSurfaceKHR surface, + uint32_t* pSurfaceFormatCount, + VkSurfaceFormatKHR* pSurfaceFormats); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfacePresentModesKHR( + VkPhysicalDevice physicalDevice, + VkSurfaceKHR surface, + uint32_t* pPresentModeCount, + VkPresentModeKHR* pPresentModes); +#endif + +#define VK_KHR_swapchain 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSwapchainKHR) + +#define VK_KHR_SWAPCHAIN_SPEC_VERSION 70 +#define VK_KHR_SWAPCHAIN_EXTENSION_NAME "VK_KHR_swapchain" + + +typedef enum VkSwapchainCreateFlagBitsKHR { + VK_SWAPCHAIN_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR = 0x00000001, + VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR = 0x00000002, + VK_SWAPCHAIN_CREATE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkSwapchainCreateFlagBitsKHR; +typedef VkFlags VkSwapchainCreateFlagsKHR; + 
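/*
 * Illustrative usage sketch added for this document; it is not part of the
 * Khronos header above. It demonstrates the standard two-call pattern for
 * the VK_KHR_surface queries declared earlier (call once for the count,
 * allocate, call again for the data). It assumes vulkan.h and stdlib.h are
 * available; pick_surface_format_example is a hypothetical helper name and
 * VkResult/allocation checks are omitted for brevity.
 */
static VkSurfaceFormatKHR pick_surface_format_example(VkPhysicalDevice physical_device,
                                                      VkSurfaceKHR surface)
{
    uint32_t count = 0;
    uint32_t i;
    VkSurfaceFormatKHR chosen;
    VkSurfaceFormatKHR *formats;

    /* First call: query how many formats the surface supports. */
    vkGetPhysicalDeviceSurfaceFormatsKHR(physical_device, surface, &count, NULL);
    if (count == 0) {
        VkSurfaceFormatKHR none = { VK_FORMAT_UNDEFINED, VK_COLOR_SPACE_SRGB_NONLINEAR_KHR };
        return none;
    }

    /* Second call: fill the caller-allocated array. */
    formats = (VkSurfaceFormatKHR *)malloc(count * sizeof(*formats));
    vkGetPhysicalDeviceSurfaceFormatsKHR(physical_device, surface, &count, formats);

    /* Prefer BGRA8 + sRGB nonlinear; otherwise fall back to the first entry. */
    chosen = formats[0];
    for (i = 0; i < count; ++i) {
        if (formats[i].format == VK_FORMAT_B8G8R8A8_UNORM &&
            formats[i].colorSpace == VK_COLOR_SPACE_SRGB_NONLINEAR_KHR) {
            chosen = formats[i];
            break;
        }
    }
    free(formats);
    return chosen;
}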
+typedef enum VkDeviceGroupPresentModeFlagBitsKHR { + VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_BIT_KHR = 0x00000001, + VK_DEVICE_GROUP_PRESENT_MODE_REMOTE_BIT_KHR = 0x00000002, + VK_DEVICE_GROUP_PRESENT_MODE_SUM_BIT_KHR = 0x00000004, + VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_MULTI_DEVICE_BIT_KHR = 0x00000008, + VK_DEVICE_GROUP_PRESENT_MODE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkDeviceGroupPresentModeFlagBitsKHR; +typedef VkFlags VkDeviceGroupPresentModeFlagsKHR; + +typedef struct VkSwapchainCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkSwapchainCreateFlagsKHR flags; + VkSurfaceKHR surface; + uint32_t minImageCount; + VkFormat imageFormat; + VkColorSpaceKHR imageColorSpace; + VkExtent2D imageExtent; + uint32_t imageArrayLayers; + VkImageUsageFlags imageUsage; + VkSharingMode imageSharingMode; + uint32_t queueFamilyIndexCount; + const uint32_t* pQueueFamilyIndices; + VkSurfaceTransformFlagBitsKHR preTransform; + VkCompositeAlphaFlagBitsKHR compositeAlpha; + VkPresentModeKHR presentMode; + VkBool32 clipped; + VkSwapchainKHR oldSwapchain; +} VkSwapchainCreateInfoKHR; + +typedef struct VkPresentInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t waitSemaphoreCount; + const VkSemaphore* pWaitSemaphores; + uint32_t swapchainCount; + const VkSwapchainKHR* pSwapchains; + const uint32_t* pImageIndices; + VkResult* pResults; +} VkPresentInfoKHR; + +typedef struct VkImageSwapchainCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkSwapchainKHR swapchain; +} VkImageSwapchainCreateInfoKHR; + +typedef struct VkBindImageMemorySwapchainInfoKHR { + VkStructureType sType; + const void* pNext; + VkSwapchainKHR swapchain; + uint32_t imageIndex; +} VkBindImageMemorySwapchainInfoKHR; + +typedef struct VkAcquireNextImageInfoKHR { + VkStructureType sType; + const void* pNext; + VkSwapchainKHR swapchain; + uint64_t timeout; + VkSemaphore semaphore; + VkFence fence; + uint32_t deviceMask; +} VkAcquireNextImageInfoKHR; + +typedef struct VkDeviceGroupPresentCapabilitiesKHR { + VkStructureType sType; + const void* pNext; + uint32_t presentMask[VK_MAX_DEVICE_GROUP_SIZE]; + VkDeviceGroupPresentModeFlagsKHR modes; +} VkDeviceGroupPresentCapabilitiesKHR; + +typedef struct VkDeviceGroupPresentInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t swapchainCount; + const uint32_t* pDeviceMasks; + VkDeviceGroupPresentModeFlagBitsKHR mode; +} VkDeviceGroupPresentInfoKHR; + +typedef struct VkDeviceGroupSwapchainCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkDeviceGroupPresentModeFlagsKHR modes; +} VkDeviceGroupSwapchainCreateInfoKHR; + + +typedef VkResult (VKAPI_PTR *PFN_vkCreateSwapchainKHR)(VkDevice device, const VkSwapchainCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchain); +typedef void (VKAPI_PTR *PFN_vkDestroySwapchainKHR)(VkDevice device, VkSwapchainKHR swapchain, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkGetSwapchainImagesKHR)(VkDevice device, VkSwapchainKHR swapchain, uint32_t* pSwapchainImageCount, VkImage* pSwapchainImages); +typedef VkResult (VKAPI_PTR *PFN_vkAcquireNextImageKHR)(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout, VkSemaphore semaphore, VkFence fence, uint32_t* pImageIndex); +typedef VkResult (VKAPI_PTR *PFN_vkQueuePresentKHR)(VkQueue queue, const VkPresentInfoKHR* pPresentInfo); +typedef VkResult (VKAPI_PTR *PFN_vkGetDeviceGroupPresentCapabilitiesKHR)(VkDevice device, VkDeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities); 
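/*
 * Illustrative usage sketch added for this document; it is not part of the
 * Khronos header above. It shows a minimal acquire/present step built on the
 * VK_KHR_swapchain entry points whose pointer types are declared here.
 * Swapchain creation, command recording, and queue submission that signals
 * render_finished are assumed to happen elsewhere; present_one_frame_example
 * is a hypothetical helper name. UINT64_MAX requires stdint.h.
 */
static VkResult present_one_frame_example(VkDevice device, VkQueue queue,
                                          VkSwapchainKHR swapchain,
                                          VkSemaphore image_available,
                                          VkSemaphore render_finished)
{
    uint32_t image_index = 0;
    VkPresentInfoKHR present_info;
    VkResult result;

    /* Wait (up to UINT64_MAX ns) for the next presentable image. */
    result = vkAcquireNextImageKHR(device, swapchain, UINT64_MAX,
                                   image_available, VK_NULL_HANDLE, &image_index);
    if (result != VK_SUCCESS && result != VK_SUBOPTIMAL_KHR) {
        /* e.g. VK_ERROR_OUT_OF_DATE_KHR: the swapchain must be recreated. */
        return result;
    }

    /* ... record and submit work that waits on image_available and
       signals render_finished before presenting ... */

    present_info.sType = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR;
    present_info.pNext = NULL;
    present_info.waitSemaphoreCount = 1;
    present_info.pWaitSemaphores = &render_finished;
    present_info.swapchainCount = 1;
    present_info.pSwapchains = &swapchain;
    present_info.pImageIndices = &image_index;
    present_info.pResults = NULL;

    return vkQueuePresentKHR(queue, &present_info);
}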
+typedef VkResult (VKAPI_PTR *PFN_vkGetDeviceGroupSurfacePresentModesKHR)(VkDevice device, VkSurfaceKHR surface, VkDeviceGroupPresentModeFlagsKHR* pModes); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDevicePresentRectanglesKHR)(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pRectCount, VkRect2D* pRects); +typedef VkResult (VKAPI_PTR *PFN_vkAcquireNextImage2KHR)(VkDevice device, const VkAcquireNextImageInfoKHR* pAcquireInfo, uint32_t* pImageIndex); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateSwapchainKHR( + VkDevice device, + const VkSwapchainCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSwapchainKHR* pSwapchain); + +VKAPI_ATTR void VKAPI_CALL vkDestroySwapchainKHR( + VkDevice device, + VkSwapchainKHR swapchain, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetSwapchainImagesKHR( + VkDevice device, + VkSwapchainKHR swapchain, + uint32_t* pSwapchainImageCount, + VkImage* pSwapchainImages); + +VKAPI_ATTR VkResult VKAPI_CALL vkAcquireNextImageKHR( + VkDevice device, + VkSwapchainKHR swapchain, + uint64_t timeout, + VkSemaphore semaphore, + VkFence fence, + uint32_t* pImageIndex); + +VKAPI_ATTR VkResult VKAPI_CALL vkQueuePresentKHR( + VkQueue queue, + const VkPresentInfoKHR* pPresentInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetDeviceGroupPresentCapabilitiesKHR( + VkDevice device, + VkDeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetDeviceGroupSurfacePresentModesKHR( + VkDevice device, + VkSurfaceKHR surface, + VkDeviceGroupPresentModeFlagsKHR* pModes); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDevicePresentRectanglesKHR( + VkPhysicalDevice physicalDevice, + VkSurfaceKHR surface, + uint32_t* pRectCount, + VkRect2D* pRects); + +VKAPI_ATTR VkResult VKAPI_CALL vkAcquireNextImage2KHR( + VkDevice device, + const VkAcquireNextImageInfoKHR* pAcquireInfo, + uint32_t* pImageIndex); +#endif + +#define VK_KHR_display 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDisplayKHR) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDisplayModeKHR) + +#define VK_KHR_DISPLAY_SPEC_VERSION 21 +#define VK_KHR_DISPLAY_EXTENSION_NAME "VK_KHR_display" + + +typedef enum VkDisplayPlaneAlphaFlagBitsKHR { + VK_DISPLAY_PLANE_ALPHA_OPAQUE_BIT_KHR = 0x00000001, + VK_DISPLAY_PLANE_ALPHA_GLOBAL_BIT_KHR = 0x00000002, + VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_BIT_KHR = 0x00000004, + VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_PREMULTIPLIED_BIT_KHR = 0x00000008, + VK_DISPLAY_PLANE_ALPHA_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkDisplayPlaneAlphaFlagBitsKHR; +typedef VkFlags VkDisplayPlaneAlphaFlagsKHR; +typedef VkFlags VkDisplayModeCreateFlagsKHR; +typedef VkFlags VkDisplaySurfaceCreateFlagsKHR; + +typedef struct VkDisplayPropertiesKHR { + VkDisplayKHR display; + const char* displayName; + VkExtent2D physicalDimensions; + VkExtent2D physicalResolution; + VkSurfaceTransformFlagsKHR supportedTransforms; + VkBool32 planeReorderPossible; + VkBool32 persistentContent; +} VkDisplayPropertiesKHR; + +typedef struct VkDisplayModeParametersKHR { + VkExtent2D visibleRegion; + uint32_t refreshRate; +} VkDisplayModeParametersKHR; + +typedef struct VkDisplayModePropertiesKHR { + VkDisplayModeKHR displayMode; + VkDisplayModeParametersKHR parameters; +} VkDisplayModePropertiesKHR; + +typedef struct VkDisplayModeCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkDisplayModeCreateFlagsKHR flags; + VkDisplayModeParametersKHR parameters; +} VkDisplayModeCreateInfoKHR; + +typedef struct 
VkDisplayPlaneCapabilitiesKHR { + VkDisplayPlaneAlphaFlagsKHR supportedAlpha; + VkOffset2D minSrcPosition; + VkOffset2D maxSrcPosition; + VkExtent2D minSrcExtent; + VkExtent2D maxSrcExtent; + VkOffset2D minDstPosition; + VkOffset2D maxDstPosition; + VkExtent2D minDstExtent; + VkExtent2D maxDstExtent; +} VkDisplayPlaneCapabilitiesKHR; + +typedef struct VkDisplayPlanePropertiesKHR { + VkDisplayKHR currentDisplay; + uint32_t currentStackIndex; +} VkDisplayPlanePropertiesKHR; + +typedef struct VkDisplaySurfaceCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkDisplaySurfaceCreateFlagsKHR flags; + VkDisplayModeKHR displayMode; + uint32_t planeIndex; + uint32_t planeStackIndex; + VkSurfaceTransformFlagBitsKHR transform; + float globalAlpha; + VkDisplayPlaneAlphaFlagBitsKHR alphaMode; + VkExtent2D imageExtent; +} VkDisplaySurfaceCreateInfoKHR; + + +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceDisplayPropertiesKHR)(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPropertiesKHR* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR)(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPlanePropertiesKHR* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkGetDisplayPlaneSupportedDisplaysKHR)(VkPhysicalDevice physicalDevice, uint32_t planeIndex, uint32_t* pDisplayCount, VkDisplayKHR* pDisplays); +typedef VkResult (VKAPI_PTR *PFN_vkGetDisplayModePropertiesKHR)(VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t* pPropertyCount, VkDisplayModePropertiesKHR* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkCreateDisplayModeKHR)(VkPhysicalDevice physicalDevice, VkDisplayKHR display, const VkDisplayModeCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDisplayModeKHR* pMode); +typedef VkResult (VKAPI_PTR *PFN_vkGetDisplayPlaneCapabilitiesKHR)(VkPhysicalDevice physicalDevice, VkDisplayModeKHR mode, uint32_t planeIndex, VkDisplayPlaneCapabilitiesKHR* pCapabilities); +typedef VkResult (VKAPI_PTR *PFN_vkCreateDisplayPlaneSurfaceKHR)(VkInstance instance, const VkDisplaySurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceDisplayPropertiesKHR( + VkPhysicalDevice physicalDevice, + uint32_t* pPropertyCount, + VkDisplayPropertiesKHR* pProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceDisplayPlanePropertiesKHR( + VkPhysicalDevice physicalDevice, + uint32_t* pPropertyCount, + VkDisplayPlanePropertiesKHR* pProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayPlaneSupportedDisplaysKHR( + VkPhysicalDevice physicalDevice, + uint32_t planeIndex, + uint32_t* pDisplayCount, + VkDisplayKHR* pDisplays); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayModePropertiesKHR( + VkPhysicalDevice physicalDevice, + VkDisplayKHR display, + uint32_t* pPropertyCount, + VkDisplayModePropertiesKHR* pProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateDisplayModeKHR( + VkPhysicalDevice physicalDevice, + VkDisplayKHR display, + const VkDisplayModeCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkDisplayModeKHR* pMode); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayPlaneCapabilitiesKHR( + VkPhysicalDevice physicalDevice, + VkDisplayModeKHR mode, + uint32_t planeIndex, + VkDisplayPlaneCapabilitiesKHR* pCapabilities); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateDisplayPlaneSurfaceKHR( + VkInstance instance, + const VkDisplaySurfaceCreateInfoKHR* 
pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); +#endif + +#define VK_KHR_display_swapchain 1 +#define VK_KHR_DISPLAY_SWAPCHAIN_SPEC_VERSION 9 +#define VK_KHR_DISPLAY_SWAPCHAIN_EXTENSION_NAME "VK_KHR_display_swapchain" + +typedef struct VkDisplayPresentInfoKHR { + VkStructureType sType; + const void* pNext; + VkRect2D srcRect; + VkRect2D dstRect; + VkBool32 persistent; +} VkDisplayPresentInfoKHR; + + +typedef VkResult (VKAPI_PTR *PFN_vkCreateSharedSwapchainsKHR)(VkDevice device, uint32_t swapchainCount, const VkSwapchainCreateInfoKHR* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchains); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateSharedSwapchainsKHR( + VkDevice device, + uint32_t swapchainCount, + const VkSwapchainCreateInfoKHR* pCreateInfos, + const VkAllocationCallbacks* pAllocator, + VkSwapchainKHR* pSwapchains); +#endif + +#define VK_KHR_sampler_mirror_clamp_to_edge 1 +#define VK_KHR_SAMPLER_MIRROR_CLAMP_TO_EDGE_SPEC_VERSION 1 +#define VK_KHR_SAMPLER_MIRROR_CLAMP_TO_EDGE_EXTENSION_NAME "VK_KHR_sampler_mirror_clamp_to_edge" + + +#define VK_KHR_multiview 1 +#define VK_KHR_MULTIVIEW_SPEC_VERSION 1 +#define VK_KHR_MULTIVIEW_EXTENSION_NAME "VK_KHR_multiview" + +typedef VkRenderPassMultiviewCreateInfo VkRenderPassMultiviewCreateInfoKHR; + +typedef VkPhysicalDeviceMultiviewFeatures VkPhysicalDeviceMultiviewFeaturesKHR; + +typedef VkPhysicalDeviceMultiviewProperties VkPhysicalDeviceMultiviewPropertiesKHR; + + + +#define VK_KHR_get_physical_device_properties2 1 +#define VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_SPEC_VERSION 1 +#define VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME "VK_KHR_get_physical_device_properties2" + +typedef VkPhysicalDeviceFeatures2 VkPhysicalDeviceFeatures2KHR; + +typedef VkPhysicalDeviceProperties2 VkPhysicalDeviceProperties2KHR; + +typedef VkFormatProperties2 VkFormatProperties2KHR; + +typedef VkImageFormatProperties2 VkImageFormatProperties2KHR; + +typedef VkPhysicalDeviceImageFormatInfo2 VkPhysicalDeviceImageFormatInfo2KHR; + +typedef VkQueueFamilyProperties2 VkQueueFamilyProperties2KHR; + +typedef VkPhysicalDeviceMemoryProperties2 VkPhysicalDeviceMemoryProperties2KHR; + +typedef VkSparseImageFormatProperties2 VkSparseImageFormatProperties2KHR; + +typedef VkPhysicalDeviceSparseImageFormatInfo2 VkPhysicalDeviceSparseImageFormatInfo2KHR; + + +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceFeatures2KHR)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2* pFeatures); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceProperties2KHR)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2* pProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceFormatProperties2KHR)(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties2* pFormatProperties); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceImageFormatProperties2KHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo, VkImageFormatProperties2* pImageFormatProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR)(VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties2* pQueueFamilyProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceMemoryProperties2KHR)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2* pMemoryProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR)(VkPhysicalDevice physicalDevice, const 
VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VkSparseImageFormatProperties2* pProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFeatures2KHR( + VkPhysicalDevice physicalDevice, + VkPhysicalDeviceFeatures2* pFeatures); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceProperties2KHR( + VkPhysicalDevice physicalDevice, + VkPhysicalDeviceProperties2* pProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFormatProperties2KHR( + VkPhysicalDevice physicalDevice, + VkFormat format, + VkFormatProperties2* pFormatProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceImageFormatProperties2KHR( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo, + VkImageFormatProperties2* pImageFormatProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceQueueFamilyProperties2KHR( + VkPhysicalDevice physicalDevice, + uint32_t* pQueueFamilyPropertyCount, + VkQueueFamilyProperties2* pQueueFamilyProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceMemoryProperties2KHR( + VkPhysicalDevice physicalDevice, + VkPhysicalDeviceMemoryProperties2* pMemoryProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceSparseImageFormatProperties2KHR( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo, + uint32_t* pPropertyCount, + VkSparseImageFormatProperties2* pProperties); +#endif + +#define VK_KHR_device_group 1 +#define VK_KHR_DEVICE_GROUP_SPEC_VERSION 3 +#define VK_KHR_DEVICE_GROUP_EXTENSION_NAME "VK_KHR_device_group" + +typedef VkPeerMemoryFeatureFlags VkPeerMemoryFeatureFlagsKHR; + +typedef VkPeerMemoryFeatureFlagBits VkPeerMemoryFeatureFlagBitsKHR; + +typedef VkMemoryAllocateFlags VkMemoryAllocateFlagsKHR; + +typedef VkMemoryAllocateFlagBits VkMemoryAllocateFlagBitsKHR; + + +typedef VkMemoryAllocateFlagsInfo VkMemoryAllocateFlagsInfoKHR; + +typedef VkDeviceGroupRenderPassBeginInfo VkDeviceGroupRenderPassBeginInfoKHR; + +typedef VkDeviceGroupCommandBufferBeginInfo VkDeviceGroupCommandBufferBeginInfoKHR; + +typedef VkDeviceGroupSubmitInfo VkDeviceGroupSubmitInfoKHR; + +typedef VkDeviceGroupBindSparseInfo VkDeviceGroupBindSparseInfoKHR; + +typedef VkBindBufferMemoryDeviceGroupInfo VkBindBufferMemoryDeviceGroupInfoKHR; + +typedef VkBindImageMemoryDeviceGroupInfo VkBindImageMemoryDeviceGroupInfoKHR; + + +typedef void (VKAPI_PTR *PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR)(VkDevice device, uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VkPeerMemoryFeatureFlags* pPeerMemoryFeatures); +typedef void (VKAPI_PTR *PFN_vkCmdSetDeviceMaskKHR)(VkCommandBuffer commandBuffer, uint32_t deviceMask); +typedef void (VKAPI_PTR *PFN_vkCmdDispatchBaseKHR)(VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetDeviceGroupPeerMemoryFeaturesKHR( + VkDevice device, + uint32_t heapIndex, + uint32_t localDeviceIndex, + uint32_t remoteDeviceIndex, + VkPeerMemoryFeatureFlags* pPeerMemoryFeatures); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDeviceMaskKHR( + VkCommandBuffer commandBuffer, + uint32_t deviceMask); + +VKAPI_ATTR void VKAPI_CALL vkCmdDispatchBaseKHR( + VkCommandBuffer commandBuffer, + uint32_t baseGroupX, + uint32_t baseGroupY, + uint32_t baseGroupZ, + uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ); +#endif + +#define 
VK_KHR_shader_draw_parameters 1 +#define VK_KHR_SHADER_DRAW_PARAMETERS_SPEC_VERSION 1 +#define VK_KHR_SHADER_DRAW_PARAMETERS_EXTENSION_NAME "VK_KHR_shader_draw_parameters" + + +#define VK_KHR_maintenance1 1 +#define VK_KHR_MAINTENANCE1_SPEC_VERSION 2 +#define VK_KHR_MAINTENANCE1_EXTENSION_NAME "VK_KHR_maintenance1" + +typedef VkCommandPoolTrimFlags VkCommandPoolTrimFlagsKHR; + + +typedef void (VKAPI_PTR *PFN_vkTrimCommandPoolKHR)(VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkTrimCommandPoolKHR( + VkDevice device, + VkCommandPool commandPool, + VkCommandPoolTrimFlags flags); +#endif + +#define VK_KHR_device_group_creation 1 +#define VK_KHR_DEVICE_GROUP_CREATION_SPEC_VERSION 1 +#define VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME "VK_KHR_device_group_creation" +#define VK_MAX_DEVICE_GROUP_SIZE_KHR VK_MAX_DEVICE_GROUP_SIZE + +typedef VkPhysicalDeviceGroupProperties VkPhysicalDeviceGroupPropertiesKHR; + +typedef VkDeviceGroupDeviceCreateInfo VkDeviceGroupDeviceCreateInfoKHR; + + +typedef VkResult (VKAPI_PTR *PFN_vkEnumeratePhysicalDeviceGroupsKHR)(VkInstance instance, uint32_t* pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkEnumeratePhysicalDeviceGroupsKHR( + VkInstance instance, + uint32_t* pPhysicalDeviceGroupCount, + VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties); +#endif + +#define VK_KHR_external_memory_capabilities 1 +#define VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_SPEC_VERSION 1 +#define VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME "VK_KHR_external_memory_capabilities" +#define VK_LUID_SIZE_KHR VK_LUID_SIZE + +typedef VkExternalMemoryHandleTypeFlags VkExternalMemoryHandleTypeFlagsKHR; + +typedef VkExternalMemoryHandleTypeFlagBits VkExternalMemoryHandleTypeFlagBitsKHR; + +typedef VkExternalMemoryFeatureFlags VkExternalMemoryFeatureFlagsKHR; + +typedef VkExternalMemoryFeatureFlagBits VkExternalMemoryFeatureFlagBitsKHR; + + +typedef VkExternalMemoryProperties VkExternalMemoryPropertiesKHR; + +typedef VkPhysicalDeviceExternalImageFormatInfo VkPhysicalDeviceExternalImageFormatInfoKHR; + +typedef VkExternalImageFormatProperties VkExternalImageFormatPropertiesKHR; + +typedef VkPhysicalDeviceExternalBufferInfo VkPhysicalDeviceExternalBufferInfoKHR; + +typedef VkExternalBufferProperties VkExternalBufferPropertiesKHR; + +typedef VkPhysicalDeviceIDProperties VkPhysicalDeviceIDPropertiesKHR; + + +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VkExternalBufferProperties* pExternalBufferProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalBufferPropertiesKHR( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo, + VkExternalBufferProperties* pExternalBufferProperties); +#endif + +#define VK_KHR_external_memory 1 +#define VK_KHR_EXTERNAL_MEMORY_SPEC_VERSION 1 +#define VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME "VK_KHR_external_memory" +#define VK_QUEUE_FAMILY_EXTERNAL_KHR VK_QUEUE_FAMILY_EXTERNAL + +typedef VkExternalMemoryImageCreateInfo VkExternalMemoryImageCreateInfoKHR; + +typedef VkExternalMemoryBufferCreateInfo VkExternalMemoryBufferCreateInfoKHR; + +typedef VkExportMemoryAllocateInfo VkExportMemoryAllocateInfoKHR; + + + +#define VK_KHR_external_memory_fd 1 +#define 
VK_KHR_EXTERNAL_MEMORY_FD_SPEC_VERSION 1 +#define VK_KHR_EXTERNAL_MEMORY_FD_EXTENSION_NAME "VK_KHR_external_memory_fd" + +typedef struct VkImportMemoryFdInfoKHR { + VkStructureType sType; + const void* pNext; + VkExternalMemoryHandleTypeFlagBits handleType; + int fd; +} VkImportMemoryFdInfoKHR; + +typedef struct VkMemoryFdPropertiesKHR { + VkStructureType sType; + void* pNext; + uint32_t memoryTypeBits; +} VkMemoryFdPropertiesKHR; + +typedef struct VkMemoryGetFdInfoKHR { + VkStructureType sType; + const void* pNext; + VkDeviceMemory memory; + VkExternalMemoryHandleTypeFlagBits handleType; +} VkMemoryGetFdInfoKHR; + + +typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryFdKHR)(VkDevice device, const VkMemoryGetFdInfoKHR* pGetFdInfo, int* pFd); +typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryFdPropertiesKHR)(VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, int fd, VkMemoryFdPropertiesKHR* pMemoryFdProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryFdKHR( + VkDevice device, + const VkMemoryGetFdInfoKHR* pGetFdInfo, + int* pFd); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryFdPropertiesKHR( + VkDevice device, + VkExternalMemoryHandleTypeFlagBits handleType, + int fd, + VkMemoryFdPropertiesKHR* pMemoryFdProperties); +#endif + +#define VK_KHR_external_semaphore_capabilities 1 +#define VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_SPEC_VERSION 1 +#define VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_EXTENSION_NAME "VK_KHR_external_semaphore_capabilities" + +typedef VkExternalSemaphoreHandleTypeFlags VkExternalSemaphoreHandleTypeFlagsKHR; + +typedef VkExternalSemaphoreHandleTypeFlagBits VkExternalSemaphoreHandleTypeFlagBitsKHR; + +typedef VkExternalSemaphoreFeatureFlags VkExternalSemaphoreFeatureFlagsKHR; + +typedef VkExternalSemaphoreFeatureFlagBits VkExternalSemaphoreFeatureFlagBitsKHR; + + +typedef VkPhysicalDeviceExternalSemaphoreInfo VkPhysicalDeviceExternalSemaphoreInfoKHR; + +typedef VkExternalSemaphoreProperties VkExternalSemaphorePropertiesKHR; + + +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VkExternalSemaphoreProperties* pExternalSemaphoreProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, + VkExternalSemaphoreProperties* pExternalSemaphoreProperties); +#endif + +#define VK_KHR_external_semaphore 1 +#define VK_KHR_EXTERNAL_SEMAPHORE_SPEC_VERSION 1 +#define VK_KHR_EXTERNAL_SEMAPHORE_EXTENSION_NAME "VK_KHR_external_semaphore" + +typedef VkSemaphoreImportFlags VkSemaphoreImportFlagsKHR; + +typedef VkSemaphoreImportFlagBits VkSemaphoreImportFlagBitsKHR; + + +typedef VkExportSemaphoreCreateInfo VkExportSemaphoreCreateInfoKHR; + + + +#define VK_KHR_external_semaphore_fd 1 +#define VK_KHR_EXTERNAL_SEMAPHORE_FD_SPEC_VERSION 1 +#define VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME "VK_KHR_external_semaphore_fd" + +typedef struct VkImportSemaphoreFdInfoKHR { + VkStructureType sType; + const void* pNext; + VkSemaphore semaphore; + VkSemaphoreImportFlags flags; + VkExternalSemaphoreHandleTypeFlagBits handleType; + int fd; +} VkImportSemaphoreFdInfoKHR; + +typedef struct VkSemaphoreGetFdInfoKHR { + VkStructureType sType; + const void* pNext; + VkSemaphore semaphore; + VkExternalSemaphoreHandleTypeFlagBits handleType; +} VkSemaphoreGetFdInfoKHR; + + +typedef VkResult 
(VKAPI_PTR *PFN_vkImportSemaphoreFdKHR)(VkDevice device, const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo); +typedef VkResult (VKAPI_PTR *PFN_vkGetSemaphoreFdKHR)(VkDevice device, const VkSemaphoreGetFdInfoKHR* pGetFdInfo, int* pFd); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkImportSemaphoreFdKHR( + VkDevice device, + const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetSemaphoreFdKHR( + VkDevice device, + const VkSemaphoreGetFdInfoKHR* pGetFdInfo, + int* pFd); +#endif + +#define VK_KHR_push_descriptor 1 +#define VK_KHR_PUSH_DESCRIPTOR_SPEC_VERSION 2 +#define VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME "VK_KHR_push_descriptor" + +typedef struct VkPhysicalDevicePushDescriptorPropertiesKHR { + VkStructureType sType; + void* pNext; + uint32_t maxPushDescriptors; +} VkPhysicalDevicePushDescriptorPropertiesKHR; + + +typedef void (VKAPI_PTR *PFN_vkCmdPushDescriptorSetKHR)(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites); +typedef void (VKAPI_PTR *PFN_vkCmdPushDescriptorSetWithTemplateKHR)(VkCommandBuffer commandBuffer, VkDescriptorUpdateTemplate descriptorUpdateTemplate, VkPipelineLayout layout, uint32_t set, const void* pData); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdPushDescriptorSetKHR( + VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipelineLayout layout, + uint32_t set, + uint32_t descriptorWriteCount, + const VkWriteDescriptorSet* pDescriptorWrites); + +VKAPI_ATTR void VKAPI_CALL vkCmdPushDescriptorSetWithTemplateKHR( + VkCommandBuffer commandBuffer, + VkDescriptorUpdateTemplate descriptorUpdateTemplate, + VkPipelineLayout layout, + uint32_t set, + const void* pData); +#endif + +#define VK_KHR_16bit_storage 1 +#define VK_KHR_16BIT_STORAGE_SPEC_VERSION 1 +#define VK_KHR_16BIT_STORAGE_EXTENSION_NAME "VK_KHR_16bit_storage" + +typedef VkPhysicalDevice16BitStorageFeatures VkPhysicalDevice16BitStorageFeaturesKHR; + + + +#define VK_KHR_incremental_present 1 +#define VK_KHR_INCREMENTAL_PRESENT_SPEC_VERSION 1 +#define VK_KHR_INCREMENTAL_PRESENT_EXTENSION_NAME "VK_KHR_incremental_present" + +typedef struct VkRectLayerKHR { + VkOffset2D offset; + VkExtent2D extent; + uint32_t layer; +} VkRectLayerKHR; + +typedef struct VkPresentRegionKHR { + uint32_t rectangleCount; + const VkRectLayerKHR* pRectangles; +} VkPresentRegionKHR; + +typedef struct VkPresentRegionsKHR { + VkStructureType sType; + const void* pNext; + uint32_t swapchainCount; + const VkPresentRegionKHR* pRegions; +} VkPresentRegionsKHR; + + + +#define VK_KHR_descriptor_update_template 1 +typedef VkDescriptorUpdateTemplate VkDescriptorUpdateTemplateKHR; + + +#define VK_KHR_DESCRIPTOR_UPDATE_TEMPLATE_SPEC_VERSION 1 +#define VK_KHR_DESCRIPTOR_UPDATE_TEMPLATE_EXTENSION_NAME "VK_KHR_descriptor_update_template" + +typedef VkDescriptorUpdateTemplateType VkDescriptorUpdateTemplateTypeKHR; + + +typedef VkDescriptorUpdateTemplateCreateFlags VkDescriptorUpdateTemplateCreateFlagsKHR; + + +typedef VkDescriptorUpdateTemplateEntry VkDescriptorUpdateTemplateEntryKHR; + +typedef VkDescriptorUpdateTemplateCreateInfo VkDescriptorUpdateTemplateCreateInfoKHR; + + +typedef VkResult (VKAPI_PTR *PFN_vkCreateDescriptorUpdateTemplateKHR)(VkDevice device, const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate); +typedef 
void (VKAPI_PTR *PFN_vkDestroyDescriptorUpdateTemplateKHR)(VkDevice device, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkUpdateDescriptorSetWithTemplateKHR)(VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateDescriptorUpdateTemplateKHR( + VkDevice device, + const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate); + +VKAPI_ATTR void VKAPI_CALL vkDestroyDescriptorUpdateTemplateKHR( + VkDevice device, + VkDescriptorUpdateTemplate descriptorUpdateTemplate, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR void VKAPI_CALL vkUpdateDescriptorSetWithTemplateKHR( + VkDevice device, + VkDescriptorSet descriptorSet, + VkDescriptorUpdateTemplate descriptorUpdateTemplate, + const void* pData); +#endif + +#define VK_KHR_create_renderpass2 1 +#define VK_KHR_CREATE_RENDERPASS_2_SPEC_VERSION 1 +#define VK_KHR_CREATE_RENDERPASS_2_EXTENSION_NAME "VK_KHR_create_renderpass2" + +typedef struct VkAttachmentDescription2KHR { + VkStructureType sType; + const void* pNext; + VkAttachmentDescriptionFlags flags; + VkFormat format; + VkSampleCountFlagBits samples; + VkAttachmentLoadOp loadOp; + VkAttachmentStoreOp storeOp; + VkAttachmentLoadOp stencilLoadOp; + VkAttachmentStoreOp stencilStoreOp; + VkImageLayout initialLayout; + VkImageLayout finalLayout; +} VkAttachmentDescription2KHR; + +typedef struct VkAttachmentReference2KHR { + VkStructureType sType; + const void* pNext; + uint32_t attachment; + VkImageLayout layout; + VkImageAspectFlags aspectMask; +} VkAttachmentReference2KHR; + +typedef struct VkSubpassDescription2KHR { + VkStructureType sType; + const void* pNext; + VkSubpassDescriptionFlags flags; + VkPipelineBindPoint pipelineBindPoint; + uint32_t viewMask; + uint32_t inputAttachmentCount; + const VkAttachmentReference2KHR* pInputAttachments; + uint32_t colorAttachmentCount; + const VkAttachmentReference2KHR* pColorAttachments; + const VkAttachmentReference2KHR* pResolveAttachments; + const VkAttachmentReference2KHR* pDepthStencilAttachment; + uint32_t preserveAttachmentCount; + const uint32_t* pPreserveAttachments; +} VkSubpassDescription2KHR; + +typedef struct VkSubpassDependency2KHR { + VkStructureType sType; + const void* pNext; + uint32_t srcSubpass; + uint32_t dstSubpass; + VkPipelineStageFlags srcStageMask; + VkPipelineStageFlags dstStageMask; + VkAccessFlags srcAccessMask; + VkAccessFlags dstAccessMask; + VkDependencyFlags dependencyFlags; + int32_t viewOffset; +} VkSubpassDependency2KHR; + +typedef struct VkRenderPassCreateInfo2KHR { + VkStructureType sType; + const void* pNext; + VkRenderPassCreateFlags flags; + uint32_t attachmentCount; + const VkAttachmentDescription2KHR* pAttachments; + uint32_t subpassCount; + const VkSubpassDescription2KHR* pSubpasses; + uint32_t dependencyCount; + const VkSubpassDependency2KHR* pDependencies; + uint32_t correlatedViewMaskCount; + const uint32_t* pCorrelatedViewMasks; +} VkRenderPassCreateInfo2KHR; + +typedef struct VkSubpassBeginInfoKHR { + VkStructureType sType; + const void* pNext; + VkSubpassContents contents; +} VkSubpassBeginInfoKHR; + +typedef struct VkSubpassEndInfoKHR { + VkStructureType sType; + const void* pNext; +} VkSubpassEndInfoKHR; + + +typedef VkResult (VKAPI_PTR *PFN_vkCreateRenderPass2KHR)(VkDevice device, const 
VkRenderPassCreateInfo2KHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass); +typedef void (VKAPI_PTR *PFN_vkCmdBeginRenderPass2KHR)(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, const VkSubpassBeginInfoKHR* pSubpassBeginInfo); +typedef void (VKAPI_PTR *PFN_vkCmdNextSubpass2KHR)(VkCommandBuffer commandBuffer, const VkSubpassBeginInfoKHR* pSubpassBeginInfo, const VkSubpassEndInfoKHR* pSubpassEndInfo); +typedef void (VKAPI_PTR *PFN_vkCmdEndRenderPass2KHR)(VkCommandBuffer commandBuffer, const VkSubpassEndInfoKHR* pSubpassEndInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateRenderPass2KHR( + VkDevice device, + const VkRenderPassCreateInfo2KHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkRenderPass* pRenderPass); + +VKAPI_ATTR void VKAPI_CALL vkCmdBeginRenderPass2KHR( + VkCommandBuffer commandBuffer, + const VkRenderPassBeginInfo* pRenderPassBegin, + const VkSubpassBeginInfoKHR* pSubpassBeginInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdNextSubpass2KHR( + VkCommandBuffer commandBuffer, + const VkSubpassBeginInfoKHR* pSubpassBeginInfo, + const VkSubpassEndInfoKHR* pSubpassEndInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdEndRenderPass2KHR( + VkCommandBuffer commandBuffer, + const VkSubpassEndInfoKHR* pSubpassEndInfo); +#endif + +#define VK_KHR_shared_presentable_image 1 +#define VK_KHR_SHARED_PRESENTABLE_IMAGE_SPEC_VERSION 1 +#define VK_KHR_SHARED_PRESENTABLE_IMAGE_EXTENSION_NAME "VK_KHR_shared_presentable_image" + +typedef struct VkSharedPresentSurfaceCapabilitiesKHR { + VkStructureType sType; + void* pNext; + VkImageUsageFlags sharedPresentSupportedUsageFlags; +} VkSharedPresentSurfaceCapabilitiesKHR; + + +typedef VkResult (VKAPI_PTR *PFN_vkGetSwapchainStatusKHR)(VkDevice device, VkSwapchainKHR swapchain); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetSwapchainStatusKHR( + VkDevice device, + VkSwapchainKHR swapchain); +#endif + +#define VK_KHR_external_fence_capabilities 1 +#define VK_KHR_EXTERNAL_FENCE_CAPABILITIES_SPEC_VERSION 1 +#define VK_KHR_EXTERNAL_FENCE_CAPABILITIES_EXTENSION_NAME "VK_KHR_external_fence_capabilities" + +typedef VkExternalFenceHandleTypeFlags VkExternalFenceHandleTypeFlagsKHR; + +typedef VkExternalFenceHandleTypeFlagBits VkExternalFenceHandleTypeFlagBitsKHR; + +typedef VkExternalFenceFeatureFlags VkExternalFenceFeatureFlagsKHR; + +typedef VkExternalFenceFeatureFlagBits VkExternalFenceFeatureFlagBitsKHR; + + +typedef VkPhysicalDeviceExternalFenceInfo VkPhysicalDeviceExternalFenceInfoKHR; + +typedef VkExternalFenceProperties VkExternalFencePropertiesKHR; + + +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VkExternalFenceProperties* pExternalFenceProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalFencePropertiesKHR( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo, + VkExternalFenceProperties* pExternalFenceProperties); +#endif + +#define VK_KHR_external_fence 1 +#define VK_KHR_EXTERNAL_FENCE_SPEC_VERSION 1 +#define VK_KHR_EXTERNAL_FENCE_EXTENSION_NAME "VK_KHR_external_fence" + +typedef VkFenceImportFlags VkFenceImportFlagsKHR; + +typedef VkFenceImportFlagBits VkFenceImportFlagBitsKHR; + + +typedef VkExportFenceCreateInfo VkExportFenceCreateInfoKHR; + + + +#define VK_KHR_external_fence_fd 1 +#define VK_KHR_EXTERNAL_FENCE_FD_SPEC_VERSION 1 +#define 
VK_KHR_EXTERNAL_FENCE_FD_EXTENSION_NAME "VK_KHR_external_fence_fd" + +typedef struct VkImportFenceFdInfoKHR { + VkStructureType sType; + const void* pNext; + VkFence fence; + VkFenceImportFlags flags; + VkExternalFenceHandleTypeFlagBits handleType; + int fd; +} VkImportFenceFdInfoKHR; + +typedef struct VkFenceGetFdInfoKHR { + VkStructureType sType; + const void* pNext; + VkFence fence; + VkExternalFenceHandleTypeFlagBits handleType; +} VkFenceGetFdInfoKHR; + + +typedef VkResult (VKAPI_PTR *PFN_vkImportFenceFdKHR)(VkDevice device, const VkImportFenceFdInfoKHR* pImportFenceFdInfo); +typedef VkResult (VKAPI_PTR *PFN_vkGetFenceFdKHR)(VkDevice device, const VkFenceGetFdInfoKHR* pGetFdInfo, int* pFd); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkImportFenceFdKHR( + VkDevice device, + const VkImportFenceFdInfoKHR* pImportFenceFdInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetFenceFdKHR( + VkDevice device, + const VkFenceGetFdInfoKHR* pGetFdInfo, + int* pFd); +#endif + +#define VK_KHR_maintenance2 1 +#define VK_KHR_MAINTENANCE2_SPEC_VERSION 1 +#define VK_KHR_MAINTENANCE2_EXTENSION_NAME "VK_KHR_maintenance2" + +typedef VkPointClippingBehavior VkPointClippingBehaviorKHR; + +typedef VkTessellationDomainOrigin VkTessellationDomainOriginKHR; + + +typedef VkPhysicalDevicePointClippingProperties VkPhysicalDevicePointClippingPropertiesKHR; + +typedef VkRenderPassInputAttachmentAspectCreateInfo VkRenderPassInputAttachmentAspectCreateInfoKHR; + +typedef VkInputAttachmentAspectReference VkInputAttachmentAspectReferenceKHR; + +typedef VkImageViewUsageCreateInfo VkImageViewUsageCreateInfoKHR; + +typedef VkPipelineTessellationDomainOriginStateCreateInfo VkPipelineTessellationDomainOriginStateCreateInfoKHR; + + + +#define VK_KHR_get_surface_capabilities2 1 +#define VK_KHR_GET_SURFACE_CAPABILITIES_2_SPEC_VERSION 1 +#define VK_KHR_GET_SURFACE_CAPABILITIES_2_EXTENSION_NAME "VK_KHR_get_surface_capabilities2" + +typedef struct VkPhysicalDeviceSurfaceInfo2KHR { + VkStructureType sType; + const void* pNext; + VkSurfaceKHR surface; +} VkPhysicalDeviceSurfaceInfo2KHR; + +typedef struct VkSurfaceCapabilities2KHR { + VkStructureType sType; + void* pNext; + VkSurfaceCapabilitiesKHR surfaceCapabilities; +} VkSurfaceCapabilities2KHR; + +typedef struct VkSurfaceFormat2KHR { + VkStructureType sType; + void* pNext; + VkSurfaceFormatKHR surfaceFormat; +} VkSurfaceFormat2KHR; + + +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VkSurfaceCapabilities2KHR* pSurfaceCapabilities); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceFormats2KHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pSurfaceFormatCount, VkSurfaceFormat2KHR* pSurfaceFormats); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceCapabilities2KHR( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, + VkSurfaceCapabilities2KHR* pSurfaceCapabilities); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceFormats2KHR( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, + uint32_t* pSurfaceFormatCount, + VkSurfaceFormat2KHR* pSurfaceFormats); +#endif + +#define VK_KHR_variable_pointers 1 +#define VK_KHR_VARIABLE_POINTERS_SPEC_VERSION 1 +#define VK_KHR_VARIABLE_POINTERS_EXTENSION_NAME "VK_KHR_variable_pointers" + +typedef VkPhysicalDeviceVariablePointerFeatures 
VkPhysicalDeviceVariablePointerFeaturesKHR; + + + +#define VK_KHR_get_display_properties2 1 +#define VK_KHR_GET_DISPLAY_PROPERTIES_2_SPEC_VERSION 1 +#define VK_KHR_GET_DISPLAY_PROPERTIES_2_EXTENSION_NAME "VK_KHR_get_display_properties2" + +typedef struct VkDisplayProperties2KHR { + VkStructureType sType; + void* pNext; + VkDisplayPropertiesKHR displayProperties; +} VkDisplayProperties2KHR; + +typedef struct VkDisplayPlaneProperties2KHR { + VkStructureType sType; + void* pNext; + VkDisplayPlanePropertiesKHR displayPlaneProperties; +} VkDisplayPlaneProperties2KHR; + +typedef struct VkDisplayModeProperties2KHR { + VkStructureType sType; + void* pNext; + VkDisplayModePropertiesKHR displayModeProperties; +} VkDisplayModeProperties2KHR; + +typedef struct VkDisplayPlaneInfo2KHR { + VkStructureType sType; + const void* pNext; + VkDisplayModeKHR mode; + uint32_t planeIndex; +} VkDisplayPlaneInfo2KHR; + +typedef struct VkDisplayPlaneCapabilities2KHR { + VkStructureType sType; + void* pNext; + VkDisplayPlaneCapabilitiesKHR capabilities; +} VkDisplayPlaneCapabilities2KHR; + + +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceDisplayProperties2KHR)(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayProperties2KHR* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR)(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPlaneProperties2KHR* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkGetDisplayModeProperties2KHR)(VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t* pPropertyCount, VkDisplayModeProperties2KHR* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkGetDisplayPlaneCapabilities2KHR)(VkPhysicalDevice physicalDevice, const VkDisplayPlaneInfo2KHR* pDisplayPlaneInfo, VkDisplayPlaneCapabilities2KHR* pCapabilities); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceDisplayProperties2KHR( + VkPhysicalDevice physicalDevice, + uint32_t* pPropertyCount, + VkDisplayProperties2KHR* pProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceDisplayPlaneProperties2KHR( + VkPhysicalDevice physicalDevice, + uint32_t* pPropertyCount, + VkDisplayPlaneProperties2KHR* pProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayModeProperties2KHR( + VkPhysicalDevice physicalDevice, + VkDisplayKHR display, + uint32_t* pPropertyCount, + VkDisplayModeProperties2KHR* pProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayPlaneCapabilities2KHR( + VkPhysicalDevice physicalDevice, + const VkDisplayPlaneInfo2KHR* pDisplayPlaneInfo, + VkDisplayPlaneCapabilities2KHR* pCapabilities); +#endif + +#define VK_KHR_dedicated_allocation 1 +#define VK_KHR_DEDICATED_ALLOCATION_SPEC_VERSION 3 +#define VK_KHR_DEDICATED_ALLOCATION_EXTENSION_NAME "VK_KHR_dedicated_allocation" + +typedef VkMemoryDedicatedRequirements VkMemoryDedicatedRequirementsKHR; + +typedef VkMemoryDedicatedAllocateInfo VkMemoryDedicatedAllocateInfoKHR; + + + +#define VK_KHR_storage_buffer_storage_class 1 +#define VK_KHR_STORAGE_BUFFER_STORAGE_CLASS_SPEC_VERSION 1 +#define VK_KHR_STORAGE_BUFFER_STORAGE_CLASS_EXTENSION_NAME "VK_KHR_storage_buffer_storage_class" + + +#define VK_KHR_relaxed_block_layout 1 +#define VK_KHR_RELAXED_BLOCK_LAYOUT_SPEC_VERSION 1 +#define VK_KHR_RELAXED_BLOCK_LAYOUT_EXTENSION_NAME "VK_KHR_relaxed_block_layout" + + +#define VK_KHR_get_memory_requirements2 1 +#define VK_KHR_GET_MEMORY_REQUIREMENTS_2_SPEC_VERSION 1 +#define VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME "VK_KHR_get_memory_requirements2" + +typedef 
VkBufferMemoryRequirementsInfo2 VkBufferMemoryRequirementsInfo2KHR; + +typedef VkImageMemoryRequirementsInfo2 VkImageMemoryRequirementsInfo2KHR; + +typedef VkImageSparseMemoryRequirementsInfo2 VkImageSparseMemoryRequirementsInfo2KHR; + +typedef VkSparseImageMemoryRequirements2 VkSparseImageMemoryRequirements2KHR; + + +typedef void (VKAPI_PTR *PFN_vkGetImageMemoryRequirements2KHR)(VkDevice device, const VkImageMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements); +typedef void (VKAPI_PTR *PFN_vkGetBufferMemoryRequirements2KHR)(VkDevice device, const VkBufferMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements); +typedef void (VKAPI_PTR *PFN_vkGetImageSparseMemoryRequirements2KHR)(VkDevice device, const VkImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2* pSparseMemoryRequirements); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetImageMemoryRequirements2KHR( + VkDevice device, + const VkImageMemoryRequirementsInfo2* pInfo, + VkMemoryRequirements2* pMemoryRequirements); + +VKAPI_ATTR void VKAPI_CALL vkGetBufferMemoryRequirements2KHR( + VkDevice device, + const VkBufferMemoryRequirementsInfo2* pInfo, + VkMemoryRequirements2* pMemoryRequirements); + +VKAPI_ATTR void VKAPI_CALL vkGetImageSparseMemoryRequirements2KHR( + VkDevice device, + const VkImageSparseMemoryRequirementsInfo2* pInfo, + uint32_t* pSparseMemoryRequirementCount, + VkSparseImageMemoryRequirements2* pSparseMemoryRequirements); +#endif + +#define VK_KHR_image_format_list 1 +#define VK_KHR_IMAGE_FORMAT_LIST_SPEC_VERSION 1 +#define VK_KHR_IMAGE_FORMAT_LIST_EXTENSION_NAME "VK_KHR_image_format_list" + +typedef struct VkImageFormatListCreateInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t viewFormatCount; + const VkFormat* pViewFormats; +} VkImageFormatListCreateInfoKHR; + + + +#define VK_KHR_sampler_ycbcr_conversion 1 +typedef VkSamplerYcbcrConversion VkSamplerYcbcrConversionKHR; + + +#define VK_KHR_SAMPLER_YCBCR_CONVERSION_SPEC_VERSION 1 +#define VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME "VK_KHR_sampler_ycbcr_conversion" + +typedef VkSamplerYcbcrModelConversion VkSamplerYcbcrModelConversionKHR; + +typedef VkSamplerYcbcrRange VkSamplerYcbcrRangeKHR; + +typedef VkChromaLocation VkChromaLocationKHR; + + +typedef VkSamplerYcbcrConversionCreateInfo VkSamplerYcbcrConversionCreateInfoKHR; + +typedef VkSamplerYcbcrConversionInfo VkSamplerYcbcrConversionInfoKHR; + +typedef VkBindImagePlaneMemoryInfo VkBindImagePlaneMemoryInfoKHR; + +typedef VkImagePlaneMemoryRequirementsInfo VkImagePlaneMemoryRequirementsInfoKHR; + +typedef VkPhysicalDeviceSamplerYcbcrConversionFeatures VkPhysicalDeviceSamplerYcbcrConversionFeaturesKHR; + +typedef VkSamplerYcbcrConversionImageFormatProperties VkSamplerYcbcrConversionImageFormatPropertiesKHR; + + +typedef VkResult (VKAPI_PTR *PFN_vkCreateSamplerYcbcrConversionKHR)(VkDevice device, const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSamplerYcbcrConversion* pYcbcrConversion); +typedef void (VKAPI_PTR *PFN_vkDestroySamplerYcbcrConversionKHR)(VkDevice device, VkSamplerYcbcrConversion ycbcrConversion, const VkAllocationCallbacks* pAllocator); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateSamplerYcbcrConversionKHR( + VkDevice device, + const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSamplerYcbcrConversion* pYcbcrConversion); + +VKAPI_ATTR void 
VKAPI_CALL vkDestroySamplerYcbcrConversionKHR( + VkDevice device, + VkSamplerYcbcrConversion ycbcrConversion, + const VkAllocationCallbacks* pAllocator); +#endif + +#define VK_KHR_bind_memory2 1 +#define VK_KHR_BIND_MEMORY_2_SPEC_VERSION 1 +#define VK_KHR_BIND_MEMORY_2_EXTENSION_NAME "VK_KHR_bind_memory2" + +typedef VkBindBufferMemoryInfo VkBindBufferMemoryInfoKHR; + +typedef VkBindImageMemoryInfo VkBindImageMemoryInfoKHR; + + +typedef VkResult (VKAPI_PTR *PFN_vkBindBufferMemory2KHR)(VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo* pBindInfos); +typedef VkResult (VKAPI_PTR *PFN_vkBindImageMemory2KHR)(VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo* pBindInfos); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkBindBufferMemory2KHR( + VkDevice device, + uint32_t bindInfoCount, + const VkBindBufferMemoryInfo* pBindInfos); + +VKAPI_ATTR VkResult VKAPI_CALL vkBindImageMemory2KHR( + VkDevice device, + uint32_t bindInfoCount, + const VkBindImageMemoryInfo* pBindInfos); +#endif + +#define VK_KHR_maintenance3 1 +#define VK_KHR_MAINTENANCE3_SPEC_VERSION 1 +#define VK_KHR_MAINTENANCE3_EXTENSION_NAME "VK_KHR_maintenance3" + +typedef VkPhysicalDeviceMaintenance3Properties VkPhysicalDeviceMaintenance3PropertiesKHR; + +typedef VkDescriptorSetLayoutSupport VkDescriptorSetLayoutSupportKHR; + + +typedef void (VKAPI_PTR *PFN_vkGetDescriptorSetLayoutSupportKHR)(VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, VkDescriptorSetLayoutSupport* pSupport); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetDescriptorSetLayoutSupportKHR( + VkDevice device, + const VkDescriptorSetLayoutCreateInfo* pCreateInfo, + VkDescriptorSetLayoutSupport* pSupport); +#endif + +#define VK_KHR_draw_indirect_count 1 +#define VK_KHR_DRAW_INDIRECT_COUNT_SPEC_VERSION 1 +#define VK_KHR_DRAW_INDIRECT_COUNT_EXTENSION_NAME "VK_KHR_draw_indirect_count" + +typedef void (VKAPI_PTR *PFN_vkCmdDrawIndirectCountKHR)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride); +typedef void (VKAPI_PTR *PFN_vkCmdDrawIndexedIndirectCountKHR)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndirectCountKHR( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride); + +VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexedIndirectCountKHR( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride); +#endif + +#define VK_KHR_8bit_storage 1 +#define VK_KHR_8BIT_STORAGE_SPEC_VERSION 1 +#define VK_KHR_8BIT_STORAGE_EXTENSION_NAME "VK_KHR_8bit_storage" + +typedef struct VkPhysicalDevice8BitStorageFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 storageBuffer8BitAccess; + VkBool32 uniformAndStorageBuffer8BitAccess; + VkBool32 storagePushConstant8; +} VkPhysicalDevice8BitStorageFeaturesKHR; + + + +#define VK_KHR_shader_atomic_int64 1 +#define VK_KHR_SHADER_ATOMIC_INT64_SPEC_VERSION 1 +#define VK_KHR_SHADER_ATOMIC_INT64_EXTENSION_NAME "VK_KHR_shader_atomic_int64" + +typedef struct VkPhysicalDeviceShaderAtomicInt64FeaturesKHR { + VkStructureType sType; + void* 
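+/* Usage sketch for VK_KHR_draw_indirect_count (illustrative only; assumes `cmd`
+ * is recording inside a render pass with a graphics pipeline bound, `drawBuffer`
+ * holds tightly packed VkDrawIndirectCommand records, `countBuffer` holds a
+ * GPU-written uint32_t draw count at offset 0, 1024 is the assumed upper bound
+ * on that count, and `pfnCmdDrawIndirectCountKHR` was loaded with
+ * vkGetDeviceProcAddr):
+ *
+ *     pfnCmdDrawIndirectCountKHR(cmd, drawBuffer, 0, countBuffer, 0,
+ *                                1024, sizeof(VkDrawIndirectCommand));
+ */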
pNext; + VkBool32 shaderBufferInt64Atomics; + VkBool32 shaderSharedInt64Atomics; +} VkPhysicalDeviceShaderAtomicInt64FeaturesKHR; + + + +#define VK_KHR_driver_properties 1 +#define VK_MAX_DRIVER_NAME_SIZE_KHR 256 +#define VK_MAX_DRIVER_INFO_SIZE_KHR 256 +#define VK_KHR_DRIVER_PROPERTIES_SPEC_VERSION 1 +#define VK_KHR_DRIVER_PROPERTIES_EXTENSION_NAME "VK_KHR_driver_properties" + + +typedef enum VkDriverIdKHR { + VK_DRIVER_ID_AMD_PROPRIETARY_KHR = 1, + VK_DRIVER_ID_AMD_OPEN_SOURCE_KHR = 2, + VK_DRIVER_ID_MESA_RADV_KHR = 3, + VK_DRIVER_ID_NVIDIA_PROPRIETARY_KHR = 4, + VK_DRIVER_ID_INTEL_PROPRIETARY_WINDOWS_KHR = 5, + VK_DRIVER_ID_INTEL_OPEN_SOURCE_MESA_KHR = 6, + VK_DRIVER_ID_IMAGINATION_PROPRIETARY_KHR = 7, + VK_DRIVER_ID_QUALCOMM_PROPRIETARY_KHR = 8, + VK_DRIVER_ID_ARM_PROPRIETARY_KHR = 9, + VK_DRIVER_ID_BEGIN_RANGE_KHR = VK_DRIVER_ID_AMD_PROPRIETARY_KHR, + VK_DRIVER_ID_END_RANGE_KHR = VK_DRIVER_ID_ARM_PROPRIETARY_KHR, + VK_DRIVER_ID_RANGE_SIZE_KHR = (VK_DRIVER_ID_ARM_PROPRIETARY_KHR - VK_DRIVER_ID_AMD_PROPRIETARY_KHR + 1), + VK_DRIVER_ID_MAX_ENUM_KHR = 0x7FFFFFFF +} VkDriverIdKHR; + +typedef struct VkConformanceVersionKHR { + uint8_t major; + uint8_t minor; + uint8_t subminor; + uint8_t patch; +} VkConformanceVersionKHR; + +typedef struct VkPhysicalDeviceDriverPropertiesKHR { + VkStructureType sType; + void* pNext; + VkDriverIdKHR driverID; + char driverName[VK_MAX_DRIVER_NAME_SIZE_KHR]; + char driverInfo[VK_MAX_DRIVER_INFO_SIZE_KHR]; + VkConformanceVersionKHR conformanceVersion; +} VkPhysicalDeviceDriverPropertiesKHR; + + + +#define VK_KHR_vulkan_memory_model 1 +#define VK_KHR_VULKAN_MEMORY_MODEL_SPEC_VERSION 2 +#define VK_KHR_VULKAN_MEMORY_MODEL_EXTENSION_NAME "VK_KHR_vulkan_memory_model" + +typedef struct VkPhysicalDeviceVulkanMemoryModelFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 vulkanMemoryModel; + VkBool32 vulkanMemoryModelDeviceScope; +} VkPhysicalDeviceVulkanMemoryModelFeaturesKHR; + + + +#define VK_EXT_debug_report 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDebugReportCallbackEXT) + +#define VK_EXT_DEBUG_REPORT_SPEC_VERSION 9 +#define VK_EXT_DEBUG_REPORT_EXTENSION_NAME "VK_EXT_debug_report" + + +typedef enum VkDebugReportObjectTypeEXT { + VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT = 0, + VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT = 1, + VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT = 2, + VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT = 3, + VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT = 4, + VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT = 5, + VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT = 6, + VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT = 7, + VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT = 8, + VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT = 9, + VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT = 10, + VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT = 11, + VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT = 12, + VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT = 13, + VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT = 14, + VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT = 15, + VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT = 16, + VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT = 17, + VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT = 18, + VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT = 19, + VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT = 20, + VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT = 21, + VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT = 22, + VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT = 23, + VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT = 24, + VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT = 25, + 
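+/* Usage sketch for VK_KHR_driver_properties (illustrative only; assumes a
+ * VkPhysicalDevice `physicalDevice` whose instance supports Vulkan 1.1, so the
+ * core vkGetPhysicalDeviceProperties2 entry point is available):
+ *
+ *     VkPhysicalDeviceDriverPropertiesKHR driverProps = {
+ *         VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES_KHR, NULL };
+ *     VkPhysicalDeviceProperties2 props2 = {
+ *         VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2, &driverProps };
+ *     vkGetPhysicalDeviceProperties2(physicalDevice, &props2);
+ *     // driverProps.driverID, driverName and conformanceVersion identify the ICD.
+ */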
VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT = 26, + VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT = 27, + VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT = 28, + VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_KHR_EXT = 29, + VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_MODE_KHR_EXT = 30, + VK_DEBUG_REPORT_OBJECT_TYPE_OBJECT_TABLE_NVX_EXT = 31, + VK_DEBUG_REPORT_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX_EXT = 32, + VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT = 33, + VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT = 1000156000, + VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT = 1000085000, + VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT = 1000165000, + VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT, + VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT, + VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT, + VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT, + VK_DEBUG_REPORT_OBJECT_TYPE_BEGIN_RANGE_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, + VK_DEBUG_REPORT_OBJECT_TYPE_END_RANGE_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT, + VK_DEBUG_REPORT_OBJECT_TYPE_RANGE_SIZE_EXT = (VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT - VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT + 1), + VK_DEBUG_REPORT_OBJECT_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDebugReportObjectTypeEXT; + + +typedef enum VkDebugReportFlagBitsEXT { + VK_DEBUG_REPORT_INFORMATION_BIT_EXT = 0x00000001, + VK_DEBUG_REPORT_WARNING_BIT_EXT = 0x00000002, + VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT = 0x00000004, + VK_DEBUG_REPORT_ERROR_BIT_EXT = 0x00000008, + VK_DEBUG_REPORT_DEBUG_BIT_EXT = 0x00000010, + VK_DEBUG_REPORT_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDebugReportFlagBitsEXT; +typedef VkFlags VkDebugReportFlagsEXT; + +typedef VkBool32 (VKAPI_PTR *PFN_vkDebugReportCallbackEXT)( + VkDebugReportFlagsEXT flags, + VkDebugReportObjectTypeEXT objectType, + uint64_t object, + size_t location, + int32_t messageCode, + const char* pLayerPrefix, + const char* pMessage, + void* pUserData); + +typedef struct VkDebugReportCallbackCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkDebugReportFlagsEXT flags; + PFN_vkDebugReportCallbackEXT pfnCallback; + void* pUserData; +} VkDebugReportCallbackCreateInfoEXT; + + +typedef VkResult (VKAPI_PTR *PFN_vkCreateDebugReportCallbackEXT)(VkInstance instance, const VkDebugReportCallbackCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDebugReportCallbackEXT* pCallback); +typedef void (VKAPI_PTR *PFN_vkDestroyDebugReportCallbackEXT)(VkInstance instance, VkDebugReportCallbackEXT callback, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkDebugReportMessageEXT)(VkInstance instance, VkDebugReportFlagsEXT flags, VkDebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char* pLayerPrefix, const char* pMessage); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateDebugReportCallbackEXT( + VkInstance instance, + const VkDebugReportCallbackCreateInfoEXT* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkDebugReportCallbackEXT* pCallback); + +VKAPI_ATTR void VKAPI_CALL vkDestroyDebugReportCallbackEXT( + VkInstance instance, + VkDebugReportCallbackEXT callback, + const VkAllocationCallbacks* 
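+/* Usage sketch for VK_EXT_debug_report (illustrative only; assumes a VkInstance
+ * `instance` created with this extension enabled and a callback `reportCallback`
+ * matching PFN_vkDebugReportCallbackEXT):
+ *
+ *     PFN_vkCreateDebugReportCallbackEXT pfnCreate =
+ *         (PFN_vkCreateDebugReportCallbackEXT)
+ *             vkGetInstanceProcAddr(instance, "vkCreateDebugReportCallbackEXT");
+ *     VkDebugReportCallbackCreateInfoEXT ci = {
+ *         VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT, NULL,
+ *         VK_DEBUG_REPORT_WARNING_BIT_EXT | VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ *         reportCallback, NULL };
+ *     VkDebugReportCallbackEXT callback;
+ *     VkResult res = pfnCreate(instance, &ci, NULL, &callback);
+ */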
pAllocator); + +VKAPI_ATTR void VKAPI_CALL vkDebugReportMessageEXT( + VkInstance instance, + VkDebugReportFlagsEXT flags, + VkDebugReportObjectTypeEXT objectType, + uint64_t object, + size_t location, + int32_t messageCode, + const char* pLayerPrefix, + const char* pMessage); +#endif + +#define VK_NV_glsl_shader 1 +#define VK_NV_GLSL_SHADER_SPEC_VERSION 1 +#define VK_NV_GLSL_SHADER_EXTENSION_NAME "VK_NV_glsl_shader" + + +#define VK_EXT_depth_range_unrestricted 1 +#define VK_EXT_DEPTH_RANGE_UNRESTRICTED_SPEC_VERSION 1 +#define VK_EXT_DEPTH_RANGE_UNRESTRICTED_EXTENSION_NAME "VK_EXT_depth_range_unrestricted" + + +#define VK_IMG_filter_cubic 1 +#define VK_IMG_FILTER_CUBIC_SPEC_VERSION 1 +#define VK_IMG_FILTER_CUBIC_EXTENSION_NAME "VK_IMG_filter_cubic" + + +#define VK_AMD_rasterization_order 1 +#define VK_AMD_RASTERIZATION_ORDER_SPEC_VERSION 1 +#define VK_AMD_RASTERIZATION_ORDER_EXTENSION_NAME "VK_AMD_rasterization_order" + + +typedef enum VkRasterizationOrderAMD { + VK_RASTERIZATION_ORDER_STRICT_AMD = 0, + VK_RASTERIZATION_ORDER_RELAXED_AMD = 1, + VK_RASTERIZATION_ORDER_BEGIN_RANGE_AMD = VK_RASTERIZATION_ORDER_STRICT_AMD, + VK_RASTERIZATION_ORDER_END_RANGE_AMD = VK_RASTERIZATION_ORDER_RELAXED_AMD, + VK_RASTERIZATION_ORDER_RANGE_SIZE_AMD = (VK_RASTERIZATION_ORDER_RELAXED_AMD - VK_RASTERIZATION_ORDER_STRICT_AMD + 1), + VK_RASTERIZATION_ORDER_MAX_ENUM_AMD = 0x7FFFFFFF +} VkRasterizationOrderAMD; + +typedef struct VkPipelineRasterizationStateRasterizationOrderAMD { + VkStructureType sType; + const void* pNext; + VkRasterizationOrderAMD rasterizationOrder; +} VkPipelineRasterizationStateRasterizationOrderAMD; + + + +#define VK_AMD_shader_trinary_minmax 1 +#define VK_AMD_SHADER_TRINARY_MINMAX_SPEC_VERSION 1 +#define VK_AMD_SHADER_TRINARY_MINMAX_EXTENSION_NAME "VK_AMD_shader_trinary_minmax" + + +#define VK_AMD_shader_explicit_vertex_parameter 1 +#define VK_AMD_SHADER_EXPLICIT_VERTEX_PARAMETER_SPEC_VERSION 1 +#define VK_AMD_SHADER_EXPLICIT_VERTEX_PARAMETER_EXTENSION_NAME "VK_AMD_shader_explicit_vertex_parameter" + + +#define VK_EXT_debug_marker 1 +#define VK_EXT_DEBUG_MARKER_SPEC_VERSION 4 +#define VK_EXT_DEBUG_MARKER_EXTENSION_NAME "VK_EXT_debug_marker" + +typedef struct VkDebugMarkerObjectNameInfoEXT { + VkStructureType sType; + const void* pNext; + VkDebugReportObjectTypeEXT objectType; + uint64_t object; + const char* pObjectName; +} VkDebugMarkerObjectNameInfoEXT; + +typedef struct VkDebugMarkerObjectTagInfoEXT { + VkStructureType sType; + const void* pNext; + VkDebugReportObjectTypeEXT objectType; + uint64_t object; + uint64_t tagName; + size_t tagSize; + const void* pTag; +} VkDebugMarkerObjectTagInfoEXT; + +typedef struct VkDebugMarkerMarkerInfoEXT { + VkStructureType sType; + const void* pNext; + const char* pMarkerName; + float color[4]; +} VkDebugMarkerMarkerInfoEXT; + + +typedef VkResult (VKAPI_PTR *PFN_vkDebugMarkerSetObjectTagEXT)(VkDevice device, const VkDebugMarkerObjectTagInfoEXT* pTagInfo); +typedef VkResult (VKAPI_PTR *PFN_vkDebugMarkerSetObjectNameEXT)(VkDevice device, const VkDebugMarkerObjectNameInfoEXT* pNameInfo); +typedef void (VKAPI_PTR *PFN_vkCmdDebugMarkerBeginEXT)(VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT* pMarkerInfo); +typedef void (VKAPI_PTR *PFN_vkCmdDebugMarkerEndEXT)(VkCommandBuffer commandBuffer); +typedef void (VKAPI_PTR *PFN_vkCmdDebugMarkerInsertEXT)(VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT* pMarkerInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkDebugMarkerSetObjectTagEXT( + VkDevice device, + 
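+/* Usage sketch for VK_EXT_debug_marker (illustrative only; assumes a VkDevice
+ * created with this extension, a recording VkCommandBuffer `cmd`, and
+ * `pfnCmdDebugMarkerBeginEXT` / `pfnCmdDebugMarkerEndEXT` loaded with
+ * vkGetDeviceProcAddr):
+ *
+ *     VkDebugMarkerMarkerInfoEXT marker = {
+ *         VK_STRUCTURE_TYPE_DEBUG_MARKER_MARKER_INFO_EXT, NULL,
+ *         "Shadow pass", { 1.0f, 0.0f, 0.0f, 1.0f } };
+ *     pfnCmdDebugMarkerBeginEXT(cmd, &marker);
+ *     vkCmdDraw(cmd, 3, 1, 0, 0);   // whatever work the labeled region covers
+ *     pfnCmdDebugMarkerEndEXT(cmd);
+ */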
const VkDebugMarkerObjectTagInfoEXT* pTagInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkDebugMarkerSetObjectNameEXT( + VkDevice device, + const VkDebugMarkerObjectNameInfoEXT* pNameInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdDebugMarkerBeginEXT( + VkCommandBuffer commandBuffer, + const VkDebugMarkerMarkerInfoEXT* pMarkerInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdDebugMarkerEndEXT( + VkCommandBuffer commandBuffer); + +VKAPI_ATTR void VKAPI_CALL vkCmdDebugMarkerInsertEXT( + VkCommandBuffer commandBuffer, + const VkDebugMarkerMarkerInfoEXT* pMarkerInfo); +#endif + +#define VK_AMD_gcn_shader 1 +#define VK_AMD_GCN_SHADER_SPEC_VERSION 1 +#define VK_AMD_GCN_SHADER_EXTENSION_NAME "VK_AMD_gcn_shader" + + +#define VK_NV_dedicated_allocation 1 +#define VK_NV_DEDICATED_ALLOCATION_SPEC_VERSION 1 +#define VK_NV_DEDICATED_ALLOCATION_EXTENSION_NAME "VK_NV_dedicated_allocation" + +typedef struct VkDedicatedAllocationImageCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkBool32 dedicatedAllocation; +} VkDedicatedAllocationImageCreateInfoNV; + +typedef struct VkDedicatedAllocationBufferCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkBool32 dedicatedAllocation; +} VkDedicatedAllocationBufferCreateInfoNV; + +typedef struct VkDedicatedAllocationMemoryAllocateInfoNV { + VkStructureType sType; + const void* pNext; + VkImage image; + VkBuffer buffer; +} VkDedicatedAllocationMemoryAllocateInfoNV; + + + +#define VK_EXT_transform_feedback 1 +#define VK_EXT_TRANSFORM_FEEDBACK_SPEC_VERSION 1 +#define VK_EXT_TRANSFORM_FEEDBACK_EXTENSION_NAME "VK_EXT_transform_feedback" + +typedef VkFlags VkPipelineRasterizationStateStreamCreateFlagsEXT; + +typedef struct VkPhysicalDeviceTransformFeedbackFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 transformFeedback; + VkBool32 geometryStreams; +} VkPhysicalDeviceTransformFeedbackFeaturesEXT; + +typedef struct VkPhysicalDeviceTransformFeedbackPropertiesEXT { + VkStructureType sType; + void* pNext; + uint32_t maxTransformFeedbackStreams; + uint32_t maxTransformFeedbackBuffers; + VkDeviceSize maxTransformFeedbackBufferSize; + uint32_t maxTransformFeedbackStreamDataSize; + uint32_t maxTransformFeedbackBufferDataSize; + uint32_t maxTransformFeedbackBufferDataStride; + VkBool32 transformFeedbackQueries; + VkBool32 transformFeedbackStreamsLinesTriangles; + VkBool32 transformFeedbackRasterizationStreamSelect; + VkBool32 transformFeedbackDraw; +} VkPhysicalDeviceTransformFeedbackPropertiesEXT; + +typedef struct VkPipelineRasterizationStateStreamCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkPipelineRasterizationStateStreamCreateFlagsEXT flags; + uint32_t rasterizationStream; +} VkPipelineRasterizationStateStreamCreateInfoEXT; + + +typedef void (VKAPI_PTR *PFN_vkCmdBindTransformFeedbackBuffersEXT)(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets, const VkDeviceSize* pSizes); +typedef void (VKAPI_PTR *PFN_vkCmdBeginTransformFeedbackEXT)(VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VkBuffer* pCounterBuffers, const VkDeviceSize* pCounterBufferOffsets); +typedef void (VKAPI_PTR *PFN_vkCmdEndTransformFeedbackEXT)(VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VkBuffer* pCounterBuffers, const VkDeviceSize* pCounterBufferOffsets); +typedef void (VKAPI_PTR *PFN_vkCmdBeginQueryIndexedEXT)(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, 
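+/* Usage sketch for VK_EXT_transform_feedback (illustrative only; assumes `cmd`
+ * is recording inside a render pass with a pipeline that emits a vertex stream,
+ * `xfbBuffer` was created with
+ * VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_BUFFER_BIT_EXT, and the pfn* pointers were
+ * loaded with vkGetDeviceProcAddr):
+ *
+ *     VkDeviceSize xfbOffset = 0;
+ *     pfnCmdBindTransformFeedbackBuffersEXT(cmd, 0, 1, &xfbBuffer, &xfbOffset, NULL);
+ *     pfnCmdBeginTransformFeedbackEXT(cmd, 0, 0, NULL, NULL);
+ *     vkCmdDraw(cmd, 3, 1, 0, 0);
+ *     pfnCmdEndTransformFeedbackEXT(cmd, 0, 0, NULL, NULL);
+ */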
VkQueryControlFlags flags, uint32_t index); +typedef void (VKAPI_PTR *PFN_vkCmdEndQueryIndexedEXT)(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, uint32_t index); +typedef void (VKAPI_PTR *PFN_vkCmdDrawIndirectByteCountEXT)(VkCommandBuffer commandBuffer, uint32_t instanceCount, uint32_t firstInstance, VkBuffer counterBuffer, VkDeviceSize counterBufferOffset, uint32_t counterOffset, uint32_t vertexStride); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdBindTransformFeedbackBuffersEXT( + VkCommandBuffer commandBuffer, + uint32_t firstBinding, + uint32_t bindingCount, + const VkBuffer* pBuffers, + const VkDeviceSize* pOffsets, + const VkDeviceSize* pSizes); + +VKAPI_ATTR void VKAPI_CALL vkCmdBeginTransformFeedbackEXT( + VkCommandBuffer commandBuffer, + uint32_t firstCounterBuffer, + uint32_t counterBufferCount, + const VkBuffer* pCounterBuffers, + const VkDeviceSize* pCounterBufferOffsets); + +VKAPI_ATTR void VKAPI_CALL vkCmdEndTransformFeedbackEXT( + VkCommandBuffer commandBuffer, + uint32_t firstCounterBuffer, + uint32_t counterBufferCount, + const VkBuffer* pCounterBuffers, + const VkDeviceSize* pCounterBufferOffsets); + +VKAPI_ATTR void VKAPI_CALL vkCmdBeginQueryIndexedEXT( + VkCommandBuffer commandBuffer, + VkQueryPool queryPool, + uint32_t query, + VkQueryControlFlags flags, + uint32_t index); + +VKAPI_ATTR void VKAPI_CALL vkCmdEndQueryIndexedEXT( + VkCommandBuffer commandBuffer, + VkQueryPool queryPool, + uint32_t query, + uint32_t index); + +VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndirectByteCountEXT( + VkCommandBuffer commandBuffer, + uint32_t instanceCount, + uint32_t firstInstance, + VkBuffer counterBuffer, + VkDeviceSize counterBufferOffset, + uint32_t counterOffset, + uint32_t vertexStride); +#endif + +#define VK_AMD_draw_indirect_count 1 +#define VK_AMD_DRAW_INDIRECT_COUNT_SPEC_VERSION 1 +#define VK_AMD_DRAW_INDIRECT_COUNT_EXTENSION_NAME "VK_AMD_draw_indirect_count" + +typedef void (VKAPI_PTR *PFN_vkCmdDrawIndirectCountAMD)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride); +typedef void (VKAPI_PTR *PFN_vkCmdDrawIndexedIndirectCountAMD)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndirectCountAMD( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride); + +VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexedIndirectCountAMD( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride); +#endif + +#define VK_AMD_negative_viewport_height 1 +#define VK_AMD_NEGATIVE_VIEWPORT_HEIGHT_SPEC_VERSION 1 +#define VK_AMD_NEGATIVE_VIEWPORT_HEIGHT_EXTENSION_NAME "VK_AMD_negative_viewport_height" + + +#define VK_AMD_gpu_shader_half_float 1 +#define VK_AMD_GPU_SHADER_HALF_FLOAT_SPEC_VERSION 1 +#define VK_AMD_GPU_SHADER_HALF_FLOAT_EXTENSION_NAME "VK_AMD_gpu_shader_half_float" + + +#define VK_AMD_shader_ballot 1 +#define VK_AMD_SHADER_BALLOT_SPEC_VERSION 1 +#define VK_AMD_SHADER_BALLOT_EXTENSION_NAME "VK_AMD_shader_ballot" + + +#define VK_AMD_texture_gather_bias_lod 1 +#define VK_AMD_TEXTURE_GATHER_BIAS_LOD_SPEC_VERSION 1 +#define 
VK_AMD_TEXTURE_GATHER_BIAS_LOD_EXTENSION_NAME "VK_AMD_texture_gather_bias_lod" + +typedef struct VkTextureLODGatherFormatPropertiesAMD { + VkStructureType sType; + void* pNext; + VkBool32 supportsTextureGatherLODBiasAMD; +} VkTextureLODGatherFormatPropertiesAMD; + + + +#define VK_AMD_shader_info 1 +#define VK_AMD_SHADER_INFO_SPEC_VERSION 1 +#define VK_AMD_SHADER_INFO_EXTENSION_NAME "VK_AMD_shader_info" + + +typedef enum VkShaderInfoTypeAMD { + VK_SHADER_INFO_TYPE_STATISTICS_AMD = 0, + VK_SHADER_INFO_TYPE_BINARY_AMD = 1, + VK_SHADER_INFO_TYPE_DISASSEMBLY_AMD = 2, + VK_SHADER_INFO_TYPE_BEGIN_RANGE_AMD = VK_SHADER_INFO_TYPE_STATISTICS_AMD, + VK_SHADER_INFO_TYPE_END_RANGE_AMD = VK_SHADER_INFO_TYPE_DISASSEMBLY_AMD, + VK_SHADER_INFO_TYPE_RANGE_SIZE_AMD = (VK_SHADER_INFO_TYPE_DISASSEMBLY_AMD - VK_SHADER_INFO_TYPE_STATISTICS_AMD + 1), + VK_SHADER_INFO_TYPE_MAX_ENUM_AMD = 0x7FFFFFFF +} VkShaderInfoTypeAMD; + +typedef struct VkShaderResourceUsageAMD { + uint32_t numUsedVgprs; + uint32_t numUsedSgprs; + uint32_t ldsSizePerLocalWorkGroup; + size_t ldsUsageSizeInBytes; + size_t scratchMemUsageInBytes; +} VkShaderResourceUsageAMD; + +typedef struct VkShaderStatisticsInfoAMD { + VkShaderStageFlags shaderStageMask; + VkShaderResourceUsageAMD resourceUsage; + uint32_t numPhysicalVgprs; + uint32_t numPhysicalSgprs; + uint32_t numAvailableVgprs; + uint32_t numAvailableSgprs; + uint32_t computeWorkGroupSize[3]; +} VkShaderStatisticsInfoAMD; + + +typedef VkResult (VKAPI_PTR *PFN_vkGetShaderInfoAMD)(VkDevice device, VkPipeline pipeline, VkShaderStageFlagBits shaderStage, VkShaderInfoTypeAMD infoType, size_t* pInfoSize, void* pInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetShaderInfoAMD( + VkDevice device, + VkPipeline pipeline, + VkShaderStageFlagBits shaderStage, + VkShaderInfoTypeAMD infoType, + size_t* pInfoSize, + void* pInfo); +#endif + +#define VK_AMD_shader_image_load_store_lod 1 +#define VK_AMD_SHADER_IMAGE_LOAD_STORE_LOD_SPEC_VERSION 1 +#define VK_AMD_SHADER_IMAGE_LOAD_STORE_LOD_EXTENSION_NAME "VK_AMD_shader_image_load_store_lod" + + +#define VK_NV_corner_sampled_image 1 +#define VK_NV_CORNER_SAMPLED_IMAGE_SPEC_VERSION 2 +#define VK_NV_CORNER_SAMPLED_IMAGE_EXTENSION_NAME "VK_NV_corner_sampled_image" + +typedef struct VkPhysicalDeviceCornerSampledImageFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 cornerSampledImage; +} VkPhysicalDeviceCornerSampledImageFeaturesNV; + + + +#define VK_IMG_format_pvrtc 1 +#define VK_IMG_FORMAT_PVRTC_SPEC_VERSION 1 +#define VK_IMG_FORMAT_PVRTC_EXTENSION_NAME "VK_IMG_format_pvrtc" + + +#define VK_NV_external_memory_capabilities 1 +#define VK_NV_EXTERNAL_MEMORY_CAPABILITIES_SPEC_VERSION 1 +#define VK_NV_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME "VK_NV_external_memory_capabilities" + + +typedef enum VkExternalMemoryHandleTypeFlagBitsNV { + VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_NV = 0x00000001, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_NV = 0x00000002, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_BIT_NV = 0x00000004, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_KMT_BIT_NV = 0x00000008, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF +} VkExternalMemoryHandleTypeFlagBitsNV; +typedef VkFlags VkExternalMemoryHandleTypeFlagsNV; + +typedef enum VkExternalMemoryFeatureFlagBitsNV { + VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT_NV = 0x00000001, + VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_NV = 0x00000002, + VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_NV = 0x00000004, + 
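+/* Usage sketch for VK_AMD_shader_info (illustrative only; assumes a VkDevice
+ * `device` created with this extension, a compiled graphics VkPipeline
+ * `pipeline`, and `pfnGetShaderInfoAMD` loaded with vkGetDeviceProcAddr):
+ *
+ *     VkShaderStatisticsInfoAMD stats;
+ *     size_t infoSize = sizeof(stats);
+ *     VkResult res = pfnGetShaderInfoAMD(device, pipeline,
+ *         VK_SHADER_STAGE_FRAGMENT_BIT, VK_SHADER_INFO_TYPE_STATISTICS_AMD,
+ *         &infoSize, &stats);
+ *     // On VK_SUCCESS, stats.resourceUsage reports VGPR/SGPR and scratch usage.
+ */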
VK_EXTERNAL_MEMORY_FEATURE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF +} VkExternalMemoryFeatureFlagBitsNV; +typedef VkFlags VkExternalMemoryFeatureFlagsNV; + +typedef struct VkExternalImageFormatPropertiesNV { + VkImageFormatProperties imageFormatProperties; + VkExternalMemoryFeatureFlagsNV externalMemoryFeatures; + VkExternalMemoryHandleTypeFlagsNV exportFromImportedHandleTypes; + VkExternalMemoryHandleTypeFlagsNV compatibleHandleTypes; +} VkExternalImageFormatPropertiesNV; + + +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV)(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkExternalMemoryHandleTypeFlagsNV externalHandleType, VkExternalImageFormatPropertiesNV* pExternalImageFormatProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceExternalImageFormatPropertiesNV( + VkPhysicalDevice physicalDevice, + VkFormat format, + VkImageType type, + VkImageTiling tiling, + VkImageUsageFlags usage, + VkImageCreateFlags flags, + VkExternalMemoryHandleTypeFlagsNV externalHandleType, + VkExternalImageFormatPropertiesNV* pExternalImageFormatProperties); +#endif + +#define VK_NV_external_memory 1 +#define VK_NV_EXTERNAL_MEMORY_SPEC_VERSION 1 +#define VK_NV_EXTERNAL_MEMORY_EXTENSION_NAME "VK_NV_external_memory" + +typedef struct VkExternalMemoryImageCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkExternalMemoryHandleTypeFlagsNV handleTypes; +} VkExternalMemoryImageCreateInfoNV; + +typedef struct VkExportMemoryAllocateInfoNV { + VkStructureType sType; + const void* pNext; + VkExternalMemoryHandleTypeFlagsNV handleTypes; +} VkExportMemoryAllocateInfoNV; + + + +#define VK_EXT_validation_flags 1 +#define VK_EXT_VALIDATION_FLAGS_SPEC_VERSION 1 +#define VK_EXT_VALIDATION_FLAGS_EXTENSION_NAME "VK_EXT_validation_flags" + + +typedef enum VkValidationCheckEXT { + VK_VALIDATION_CHECK_ALL_EXT = 0, + VK_VALIDATION_CHECK_SHADERS_EXT = 1, + VK_VALIDATION_CHECK_BEGIN_RANGE_EXT = VK_VALIDATION_CHECK_ALL_EXT, + VK_VALIDATION_CHECK_END_RANGE_EXT = VK_VALIDATION_CHECK_SHADERS_EXT, + VK_VALIDATION_CHECK_RANGE_SIZE_EXT = (VK_VALIDATION_CHECK_SHADERS_EXT - VK_VALIDATION_CHECK_ALL_EXT + 1), + VK_VALIDATION_CHECK_MAX_ENUM_EXT = 0x7FFFFFFF +} VkValidationCheckEXT; + +typedef struct VkValidationFlagsEXT { + VkStructureType sType; + const void* pNext; + uint32_t disabledValidationCheckCount; + const VkValidationCheckEXT* pDisabledValidationChecks; +} VkValidationFlagsEXT; + + + +#define VK_EXT_shader_subgroup_ballot 1 +#define VK_EXT_SHADER_SUBGROUP_BALLOT_SPEC_VERSION 1 +#define VK_EXT_SHADER_SUBGROUP_BALLOT_EXTENSION_NAME "VK_EXT_shader_subgroup_ballot" + + +#define VK_EXT_shader_subgroup_vote 1 +#define VK_EXT_SHADER_SUBGROUP_VOTE_SPEC_VERSION 1 +#define VK_EXT_SHADER_SUBGROUP_VOTE_EXTENSION_NAME "VK_EXT_shader_subgroup_vote" + + +#define VK_EXT_astc_decode_mode 1 +#define VK_EXT_ASTC_DECODE_MODE_SPEC_VERSION 1 +#define VK_EXT_ASTC_DECODE_MODE_EXTENSION_NAME "VK_EXT_astc_decode_mode" + +typedef struct VkImageViewASTCDecodeModeEXT { + VkStructureType sType; + const void* pNext; + VkFormat decodeMode; +} VkImageViewASTCDecodeModeEXT; + +typedef struct VkPhysicalDeviceASTCDecodeFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 decodeModeSharedExponent; +} VkPhysicalDeviceASTCDecodeFeaturesEXT; + + + +#define VK_EXT_conditional_rendering 1 +#define VK_EXT_CONDITIONAL_RENDERING_SPEC_VERSION 1 +#define VK_EXT_CONDITIONAL_RENDERING_EXTENSION_NAME 
"VK_EXT_conditional_rendering" + + +typedef enum VkConditionalRenderingFlagBitsEXT { + VK_CONDITIONAL_RENDERING_INVERTED_BIT_EXT = 0x00000001, + VK_CONDITIONAL_RENDERING_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkConditionalRenderingFlagBitsEXT; +typedef VkFlags VkConditionalRenderingFlagsEXT; + +typedef struct VkConditionalRenderingBeginInfoEXT { + VkStructureType sType; + const void* pNext; + VkBuffer buffer; + VkDeviceSize offset; + VkConditionalRenderingFlagsEXT flags; +} VkConditionalRenderingBeginInfoEXT; + +typedef struct VkPhysicalDeviceConditionalRenderingFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 conditionalRendering; + VkBool32 inheritedConditionalRendering; +} VkPhysicalDeviceConditionalRenderingFeaturesEXT; + +typedef struct VkCommandBufferInheritanceConditionalRenderingInfoEXT { + VkStructureType sType; + const void* pNext; + VkBool32 conditionalRenderingEnable; +} VkCommandBufferInheritanceConditionalRenderingInfoEXT; + + +typedef void (VKAPI_PTR *PFN_vkCmdBeginConditionalRenderingEXT)(VkCommandBuffer commandBuffer, const VkConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin); +typedef void (VKAPI_PTR *PFN_vkCmdEndConditionalRenderingEXT)(VkCommandBuffer commandBuffer); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdBeginConditionalRenderingEXT( + VkCommandBuffer commandBuffer, + const VkConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin); + +VKAPI_ATTR void VKAPI_CALL vkCmdEndConditionalRenderingEXT( + VkCommandBuffer commandBuffer); +#endif + +#define VK_NVX_device_generated_commands 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkObjectTableNVX) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkIndirectCommandsLayoutNVX) + +#define VK_NVX_DEVICE_GENERATED_COMMANDS_SPEC_VERSION 3 +#define VK_NVX_DEVICE_GENERATED_COMMANDS_EXTENSION_NAME "VK_NVX_device_generated_commands" + + +typedef enum VkIndirectCommandsTokenTypeNVX { + VK_INDIRECT_COMMANDS_TOKEN_TYPE_PIPELINE_NVX = 0, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_DESCRIPTOR_SET_NVX = 1, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_INDEX_BUFFER_NVX = 2, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_VERTEX_BUFFER_NVX = 3, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_CONSTANT_NVX = 4, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_NVX = 5, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_NVX = 6, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_DISPATCH_NVX = 7, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_BEGIN_RANGE_NVX = VK_INDIRECT_COMMANDS_TOKEN_TYPE_PIPELINE_NVX, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_END_RANGE_NVX = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DISPATCH_NVX, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_RANGE_SIZE_NVX = (VK_INDIRECT_COMMANDS_TOKEN_TYPE_DISPATCH_NVX - VK_INDIRECT_COMMANDS_TOKEN_TYPE_PIPELINE_NVX + 1), + VK_INDIRECT_COMMANDS_TOKEN_TYPE_MAX_ENUM_NVX = 0x7FFFFFFF +} VkIndirectCommandsTokenTypeNVX; + +typedef enum VkObjectEntryTypeNVX { + VK_OBJECT_ENTRY_TYPE_DESCRIPTOR_SET_NVX = 0, + VK_OBJECT_ENTRY_TYPE_PIPELINE_NVX = 1, + VK_OBJECT_ENTRY_TYPE_INDEX_BUFFER_NVX = 2, + VK_OBJECT_ENTRY_TYPE_VERTEX_BUFFER_NVX = 3, + VK_OBJECT_ENTRY_TYPE_PUSH_CONSTANT_NVX = 4, + VK_OBJECT_ENTRY_TYPE_BEGIN_RANGE_NVX = VK_OBJECT_ENTRY_TYPE_DESCRIPTOR_SET_NVX, + VK_OBJECT_ENTRY_TYPE_END_RANGE_NVX = VK_OBJECT_ENTRY_TYPE_PUSH_CONSTANT_NVX, + VK_OBJECT_ENTRY_TYPE_RANGE_SIZE_NVX = (VK_OBJECT_ENTRY_TYPE_PUSH_CONSTANT_NVX - VK_OBJECT_ENTRY_TYPE_DESCRIPTOR_SET_NVX + 1), + VK_OBJECT_ENTRY_TYPE_MAX_ENUM_NVX = 0x7FFFFFFF +} VkObjectEntryTypeNVX; + + +typedef enum VkIndirectCommandsLayoutUsageFlagBitsNVX { + VK_INDIRECT_COMMANDS_LAYOUT_USAGE_UNORDERED_SEQUENCES_BIT_NVX = 0x00000001, + 
VK_INDIRECT_COMMANDS_LAYOUT_USAGE_SPARSE_SEQUENCES_BIT_NVX = 0x00000002, + VK_INDIRECT_COMMANDS_LAYOUT_USAGE_EMPTY_EXECUTIONS_BIT_NVX = 0x00000004, + VK_INDIRECT_COMMANDS_LAYOUT_USAGE_INDEXED_SEQUENCES_BIT_NVX = 0x00000008, + VK_INDIRECT_COMMANDS_LAYOUT_USAGE_FLAG_BITS_MAX_ENUM_NVX = 0x7FFFFFFF +} VkIndirectCommandsLayoutUsageFlagBitsNVX; +typedef VkFlags VkIndirectCommandsLayoutUsageFlagsNVX; + +typedef enum VkObjectEntryUsageFlagBitsNVX { + VK_OBJECT_ENTRY_USAGE_GRAPHICS_BIT_NVX = 0x00000001, + VK_OBJECT_ENTRY_USAGE_COMPUTE_BIT_NVX = 0x00000002, + VK_OBJECT_ENTRY_USAGE_FLAG_BITS_MAX_ENUM_NVX = 0x7FFFFFFF +} VkObjectEntryUsageFlagBitsNVX; +typedef VkFlags VkObjectEntryUsageFlagsNVX; + +typedef struct VkDeviceGeneratedCommandsFeaturesNVX { + VkStructureType sType; + const void* pNext; + VkBool32 computeBindingPointSupport; +} VkDeviceGeneratedCommandsFeaturesNVX; + +typedef struct VkDeviceGeneratedCommandsLimitsNVX { + VkStructureType sType; + const void* pNext; + uint32_t maxIndirectCommandsLayoutTokenCount; + uint32_t maxObjectEntryCounts; + uint32_t minSequenceCountBufferOffsetAlignment; + uint32_t minSequenceIndexBufferOffsetAlignment; + uint32_t minCommandsTokenBufferOffsetAlignment; +} VkDeviceGeneratedCommandsLimitsNVX; + +typedef struct VkIndirectCommandsTokenNVX { + VkIndirectCommandsTokenTypeNVX tokenType; + VkBuffer buffer; + VkDeviceSize offset; +} VkIndirectCommandsTokenNVX; + +typedef struct VkIndirectCommandsLayoutTokenNVX { + VkIndirectCommandsTokenTypeNVX tokenType; + uint32_t bindingUnit; + uint32_t dynamicCount; + uint32_t divisor; +} VkIndirectCommandsLayoutTokenNVX; + +typedef struct VkIndirectCommandsLayoutCreateInfoNVX { + VkStructureType sType; + const void* pNext; + VkPipelineBindPoint pipelineBindPoint; + VkIndirectCommandsLayoutUsageFlagsNVX flags; + uint32_t tokenCount; + const VkIndirectCommandsLayoutTokenNVX* pTokens; +} VkIndirectCommandsLayoutCreateInfoNVX; + +typedef struct VkCmdProcessCommandsInfoNVX { + VkStructureType sType; + const void* pNext; + VkObjectTableNVX objectTable; + VkIndirectCommandsLayoutNVX indirectCommandsLayout; + uint32_t indirectCommandsTokenCount; + const VkIndirectCommandsTokenNVX* pIndirectCommandsTokens; + uint32_t maxSequencesCount; + VkCommandBuffer targetCommandBuffer; + VkBuffer sequencesCountBuffer; + VkDeviceSize sequencesCountOffset; + VkBuffer sequencesIndexBuffer; + VkDeviceSize sequencesIndexOffset; +} VkCmdProcessCommandsInfoNVX; + +typedef struct VkCmdReserveSpaceForCommandsInfoNVX { + VkStructureType sType; + const void* pNext; + VkObjectTableNVX objectTable; + VkIndirectCommandsLayoutNVX indirectCommandsLayout; + uint32_t maxSequencesCount; +} VkCmdReserveSpaceForCommandsInfoNVX; + +typedef struct VkObjectTableCreateInfoNVX { + VkStructureType sType; + const void* pNext; + uint32_t objectCount; + const VkObjectEntryTypeNVX* pObjectEntryTypes; + const uint32_t* pObjectEntryCounts; + const VkObjectEntryUsageFlagsNVX* pObjectEntryUsageFlags; + uint32_t maxUniformBuffersPerDescriptor; + uint32_t maxStorageBuffersPerDescriptor; + uint32_t maxStorageImagesPerDescriptor; + uint32_t maxSampledImagesPerDescriptor; + uint32_t maxPipelineLayouts; +} VkObjectTableCreateInfoNVX; + +typedef struct VkObjectTableEntryNVX { + VkObjectEntryTypeNVX type; + VkObjectEntryUsageFlagsNVX flags; +} VkObjectTableEntryNVX; + +typedef struct VkObjectTablePipelineEntryNVX { + VkObjectEntryTypeNVX type; + VkObjectEntryUsageFlagsNVX flags; + VkPipeline pipeline; +} VkObjectTablePipelineEntryNVX; + +typedef struct VkObjectTableDescriptorSetEntryNVX 
{ + VkObjectEntryTypeNVX type; + VkObjectEntryUsageFlagsNVX flags; + VkPipelineLayout pipelineLayout; + VkDescriptorSet descriptorSet; +} VkObjectTableDescriptorSetEntryNVX; + +typedef struct VkObjectTableVertexBufferEntryNVX { + VkObjectEntryTypeNVX type; + VkObjectEntryUsageFlagsNVX flags; + VkBuffer buffer; +} VkObjectTableVertexBufferEntryNVX; + +typedef struct VkObjectTableIndexBufferEntryNVX { + VkObjectEntryTypeNVX type; + VkObjectEntryUsageFlagsNVX flags; + VkBuffer buffer; + VkIndexType indexType; +} VkObjectTableIndexBufferEntryNVX; + +typedef struct VkObjectTablePushConstantEntryNVX { + VkObjectEntryTypeNVX type; + VkObjectEntryUsageFlagsNVX flags; + VkPipelineLayout pipelineLayout; + VkShaderStageFlags stageFlags; +} VkObjectTablePushConstantEntryNVX; + + +typedef void (VKAPI_PTR *PFN_vkCmdProcessCommandsNVX)(VkCommandBuffer commandBuffer, const VkCmdProcessCommandsInfoNVX* pProcessCommandsInfo); +typedef void (VKAPI_PTR *PFN_vkCmdReserveSpaceForCommandsNVX)(VkCommandBuffer commandBuffer, const VkCmdReserveSpaceForCommandsInfoNVX* pReserveSpaceInfo); +typedef VkResult (VKAPI_PTR *PFN_vkCreateIndirectCommandsLayoutNVX)(VkDevice device, const VkIndirectCommandsLayoutCreateInfoNVX* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkIndirectCommandsLayoutNVX* pIndirectCommandsLayout); +typedef void (VKAPI_PTR *PFN_vkDestroyIndirectCommandsLayoutNVX)(VkDevice device, VkIndirectCommandsLayoutNVX indirectCommandsLayout, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkCreateObjectTableNVX)(VkDevice device, const VkObjectTableCreateInfoNVX* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkObjectTableNVX* pObjectTable); +typedef void (VKAPI_PTR *PFN_vkDestroyObjectTableNVX)(VkDevice device, VkObjectTableNVX objectTable, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkRegisterObjectsNVX)(VkDevice device, VkObjectTableNVX objectTable, uint32_t objectCount, const VkObjectTableEntryNVX* const* ppObjectTableEntries, const uint32_t* pObjectIndices); +typedef VkResult (VKAPI_PTR *PFN_vkUnregisterObjectsNVX)(VkDevice device, VkObjectTableNVX objectTable, uint32_t objectCount, const VkObjectEntryTypeNVX* pObjectEntryTypes, const uint32_t* pObjectIndices); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX)(VkPhysicalDevice physicalDevice, VkDeviceGeneratedCommandsFeaturesNVX* pFeatures, VkDeviceGeneratedCommandsLimitsNVX* pLimits); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdProcessCommandsNVX( + VkCommandBuffer commandBuffer, + const VkCmdProcessCommandsInfoNVX* pProcessCommandsInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdReserveSpaceForCommandsNVX( + VkCommandBuffer commandBuffer, + const VkCmdReserveSpaceForCommandsInfoNVX* pReserveSpaceInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateIndirectCommandsLayoutNVX( + VkDevice device, + const VkIndirectCommandsLayoutCreateInfoNVX* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkIndirectCommandsLayoutNVX* pIndirectCommandsLayout); + +VKAPI_ATTR void VKAPI_CALL vkDestroyIndirectCommandsLayoutNVX( + VkDevice device, + VkIndirectCommandsLayoutNVX indirectCommandsLayout, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateObjectTableNVX( + VkDevice device, + const VkObjectTableCreateInfoNVX* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkObjectTableNVX* pObjectTable); + +VKAPI_ATTR void VKAPI_CALL vkDestroyObjectTableNVX( + VkDevice device, + VkObjectTableNVX objectTable, + const 
VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkRegisterObjectsNVX( + VkDevice device, + VkObjectTableNVX objectTable, + uint32_t objectCount, + const VkObjectTableEntryNVX* const* ppObjectTableEntries, + const uint32_t* pObjectIndices); + +VKAPI_ATTR VkResult VKAPI_CALL vkUnregisterObjectsNVX( + VkDevice device, + VkObjectTableNVX objectTable, + uint32_t objectCount, + const VkObjectEntryTypeNVX* pObjectEntryTypes, + const uint32_t* pObjectIndices); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX( + VkPhysicalDevice physicalDevice, + VkDeviceGeneratedCommandsFeaturesNVX* pFeatures, + VkDeviceGeneratedCommandsLimitsNVX* pLimits); +#endif + +#define VK_NV_clip_space_w_scaling 1 +#define VK_NV_CLIP_SPACE_W_SCALING_SPEC_VERSION 1 +#define VK_NV_CLIP_SPACE_W_SCALING_EXTENSION_NAME "VK_NV_clip_space_w_scaling" + +typedef struct VkViewportWScalingNV { + float xcoeff; + float ycoeff; +} VkViewportWScalingNV; + +typedef struct VkPipelineViewportWScalingStateCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkBool32 viewportWScalingEnable; + uint32_t viewportCount; + const VkViewportWScalingNV* pViewportWScalings; +} VkPipelineViewportWScalingStateCreateInfoNV; + + +typedef void (VKAPI_PTR *PFN_vkCmdSetViewportWScalingNV)(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewportWScalingNV* pViewportWScalings); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetViewportWScalingNV( + VkCommandBuffer commandBuffer, + uint32_t firstViewport, + uint32_t viewportCount, + const VkViewportWScalingNV* pViewportWScalings); +#endif + +#define VK_EXT_direct_mode_display 1 +#define VK_EXT_DIRECT_MODE_DISPLAY_SPEC_VERSION 1 +#define VK_EXT_DIRECT_MODE_DISPLAY_EXTENSION_NAME "VK_EXT_direct_mode_display" + +typedef VkResult (VKAPI_PTR *PFN_vkReleaseDisplayEXT)(VkPhysicalDevice physicalDevice, VkDisplayKHR display); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkReleaseDisplayEXT( + VkPhysicalDevice physicalDevice, + VkDisplayKHR display); +#endif + +#define VK_EXT_display_surface_counter 1 +#define VK_EXT_DISPLAY_SURFACE_COUNTER_SPEC_VERSION 1 +#define VK_EXT_DISPLAY_SURFACE_COUNTER_EXTENSION_NAME "VK_EXT_display_surface_counter" + + +typedef enum VkSurfaceCounterFlagBitsEXT { + VK_SURFACE_COUNTER_VBLANK_EXT = 0x00000001, + VK_SURFACE_COUNTER_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkSurfaceCounterFlagBitsEXT; +typedef VkFlags VkSurfaceCounterFlagsEXT; + +typedef struct VkSurfaceCapabilities2EXT { + VkStructureType sType; + void* pNext; + uint32_t minImageCount; + uint32_t maxImageCount; + VkExtent2D currentExtent; + VkExtent2D minImageExtent; + VkExtent2D maxImageExtent; + uint32_t maxImageArrayLayers; + VkSurfaceTransformFlagsKHR supportedTransforms; + VkSurfaceTransformFlagBitsKHR currentTransform; + VkCompositeAlphaFlagsKHR supportedCompositeAlpha; + VkImageUsageFlags supportedUsageFlags; + VkSurfaceCounterFlagsEXT supportedSurfaceCounters; +} VkSurfaceCapabilities2EXT; + + +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT)(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilities2EXT* pSurfaceCapabilities); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceCapabilities2EXT( + VkPhysicalDevice physicalDevice, + VkSurfaceKHR surface, + VkSurfaceCapabilities2EXT* pSurfaceCapabilities); +#endif + +#define VK_EXT_display_control 1 +#define VK_EXT_DISPLAY_CONTROL_SPEC_VERSION 1 +#define 
VK_EXT_DISPLAY_CONTROL_EXTENSION_NAME "VK_EXT_display_control" + + +typedef enum VkDisplayPowerStateEXT { + VK_DISPLAY_POWER_STATE_OFF_EXT = 0, + VK_DISPLAY_POWER_STATE_SUSPEND_EXT = 1, + VK_DISPLAY_POWER_STATE_ON_EXT = 2, + VK_DISPLAY_POWER_STATE_BEGIN_RANGE_EXT = VK_DISPLAY_POWER_STATE_OFF_EXT, + VK_DISPLAY_POWER_STATE_END_RANGE_EXT = VK_DISPLAY_POWER_STATE_ON_EXT, + VK_DISPLAY_POWER_STATE_RANGE_SIZE_EXT = (VK_DISPLAY_POWER_STATE_ON_EXT - VK_DISPLAY_POWER_STATE_OFF_EXT + 1), + VK_DISPLAY_POWER_STATE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDisplayPowerStateEXT; + +typedef enum VkDeviceEventTypeEXT { + VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT = 0, + VK_DEVICE_EVENT_TYPE_BEGIN_RANGE_EXT = VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT, + VK_DEVICE_EVENT_TYPE_END_RANGE_EXT = VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT, + VK_DEVICE_EVENT_TYPE_RANGE_SIZE_EXT = (VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT - VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT + 1), + VK_DEVICE_EVENT_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDeviceEventTypeEXT; + +typedef enum VkDisplayEventTypeEXT { + VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT = 0, + VK_DISPLAY_EVENT_TYPE_BEGIN_RANGE_EXT = VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT, + VK_DISPLAY_EVENT_TYPE_END_RANGE_EXT = VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT, + VK_DISPLAY_EVENT_TYPE_RANGE_SIZE_EXT = (VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT - VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT + 1), + VK_DISPLAY_EVENT_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDisplayEventTypeEXT; + +typedef struct VkDisplayPowerInfoEXT { + VkStructureType sType; + const void* pNext; + VkDisplayPowerStateEXT powerState; +} VkDisplayPowerInfoEXT; + +typedef struct VkDeviceEventInfoEXT { + VkStructureType sType; + const void* pNext; + VkDeviceEventTypeEXT deviceEvent; +} VkDeviceEventInfoEXT; + +typedef struct VkDisplayEventInfoEXT { + VkStructureType sType; + const void* pNext; + VkDisplayEventTypeEXT displayEvent; +} VkDisplayEventInfoEXT; + +typedef struct VkSwapchainCounterCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkSurfaceCounterFlagsEXT surfaceCounters; +} VkSwapchainCounterCreateInfoEXT; + + +typedef VkResult (VKAPI_PTR *PFN_vkDisplayPowerControlEXT)(VkDevice device, VkDisplayKHR display, const VkDisplayPowerInfoEXT* pDisplayPowerInfo); +typedef VkResult (VKAPI_PTR *PFN_vkRegisterDeviceEventEXT)(VkDevice device, const VkDeviceEventInfoEXT* pDeviceEventInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence); +typedef VkResult (VKAPI_PTR *PFN_vkRegisterDisplayEventEXT)(VkDevice device, VkDisplayKHR display, const VkDisplayEventInfoEXT* pDisplayEventInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence); +typedef VkResult (VKAPI_PTR *PFN_vkGetSwapchainCounterEXT)(VkDevice device, VkSwapchainKHR swapchain, VkSurfaceCounterFlagBitsEXT counter, uint64_t* pCounterValue); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkDisplayPowerControlEXT( + VkDevice device, + VkDisplayKHR display, + const VkDisplayPowerInfoEXT* pDisplayPowerInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkRegisterDeviceEventEXT( + VkDevice device, + const VkDeviceEventInfoEXT* pDeviceEventInfo, + const VkAllocationCallbacks* pAllocator, + VkFence* pFence); + +VKAPI_ATTR VkResult VKAPI_CALL vkRegisterDisplayEventEXT( + VkDevice device, + VkDisplayKHR display, + const VkDisplayEventInfoEXT* pDisplayEventInfo, + const VkAllocationCallbacks* pAllocator, + VkFence* pFence); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetSwapchainCounterEXT( + VkDevice device, + VkSwapchainKHR swapchain, + VkSurfaceCounterFlagBitsEXT counter, + 
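+/* Usage sketch for VK_EXT_display_control (illustrative only; assumes a VkDevice
+ * `device`, a VkDisplayKHR `display` the application controls (for example one
+ * acquired through VK_EXT_direct_mode_display), and `pfnDisplayPowerControlEXT`
+ * loaded with vkGetDeviceProcAddr):
+ *
+ *     VkDisplayPowerInfoEXT powerInfo = {
+ *         VK_STRUCTURE_TYPE_DISPLAY_POWER_INFO_EXT, NULL,
+ *         VK_DISPLAY_POWER_STATE_OFF_EXT };
+ *     VkResult res = pfnDisplayPowerControlEXT(device, display, &powerInfo);
+ */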
uint64_t* pCounterValue); +#endif + +#define VK_GOOGLE_display_timing 1 +#define VK_GOOGLE_DISPLAY_TIMING_SPEC_VERSION 1 +#define VK_GOOGLE_DISPLAY_TIMING_EXTENSION_NAME "VK_GOOGLE_display_timing" + +typedef struct VkRefreshCycleDurationGOOGLE { + uint64_t refreshDuration; +} VkRefreshCycleDurationGOOGLE; + +typedef struct VkPastPresentationTimingGOOGLE { + uint32_t presentID; + uint64_t desiredPresentTime; + uint64_t actualPresentTime; + uint64_t earliestPresentTime; + uint64_t presentMargin; +} VkPastPresentationTimingGOOGLE; + +typedef struct VkPresentTimeGOOGLE { + uint32_t presentID; + uint64_t desiredPresentTime; +} VkPresentTimeGOOGLE; + +typedef struct VkPresentTimesInfoGOOGLE { + VkStructureType sType; + const void* pNext; + uint32_t swapchainCount; + const VkPresentTimeGOOGLE* pTimes; +} VkPresentTimesInfoGOOGLE; + + +typedef VkResult (VKAPI_PTR *PFN_vkGetRefreshCycleDurationGOOGLE)(VkDevice device, VkSwapchainKHR swapchain, VkRefreshCycleDurationGOOGLE* pDisplayTimingProperties); +typedef VkResult (VKAPI_PTR *PFN_vkGetPastPresentationTimingGOOGLE)(VkDevice device, VkSwapchainKHR swapchain, uint32_t* pPresentationTimingCount, VkPastPresentationTimingGOOGLE* pPresentationTimings); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetRefreshCycleDurationGOOGLE( + VkDevice device, + VkSwapchainKHR swapchain, + VkRefreshCycleDurationGOOGLE* pDisplayTimingProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPastPresentationTimingGOOGLE( + VkDevice device, + VkSwapchainKHR swapchain, + uint32_t* pPresentationTimingCount, + VkPastPresentationTimingGOOGLE* pPresentationTimings); +#endif + +#define VK_NV_sample_mask_override_coverage 1 +#define VK_NV_SAMPLE_MASK_OVERRIDE_COVERAGE_SPEC_VERSION 1 +#define VK_NV_SAMPLE_MASK_OVERRIDE_COVERAGE_EXTENSION_NAME "VK_NV_sample_mask_override_coverage" + + +#define VK_NV_geometry_shader_passthrough 1 +#define VK_NV_GEOMETRY_SHADER_PASSTHROUGH_SPEC_VERSION 1 +#define VK_NV_GEOMETRY_SHADER_PASSTHROUGH_EXTENSION_NAME "VK_NV_geometry_shader_passthrough" + + +#define VK_NV_viewport_array2 1 +#define VK_NV_VIEWPORT_ARRAY2_SPEC_VERSION 1 +#define VK_NV_VIEWPORT_ARRAY2_EXTENSION_NAME "VK_NV_viewport_array2" + + +#define VK_NVX_multiview_per_view_attributes 1 +#define VK_NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_SPEC_VERSION 1 +#define VK_NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_EXTENSION_NAME "VK_NVX_multiview_per_view_attributes" + +typedef struct VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX { + VkStructureType sType; + void* pNext; + VkBool32 perViewPositionAllComponents; +} VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX; + + + +#define VK_NV_viewport_swizzle 1 +#define VK_NV_VIEWPORT_SWIZZLE_SPEC_VERSION 1 +#define VK_NV_VIEWPORT_SWIZZLE_EXTENSION_NAME "VK_NV_viewport_swizzle" + + +typedef enum VkViewportCoordinateSwizzleNV { + VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_X_NV = 0, + VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_X_NV = 1, + VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Y_NV = 2, + VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Y_NV = 3, + VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Z_NV = 4, + VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Z_NV = 5, + VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_W_NV = 6, + VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_W_NV = 7, + VK_VIEWPORT_COORDINATE_SWIZZLE_BEGIN_RANGE_NV = VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_X_NV, + VK_VIEWPORT_COORDINATE_SWIZZLE_END_RANGE_NV = VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_W_NV, + VK_VIEWPORT_COORDINATE_SWIZZLE_RANGE_SIZE_NV = (VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_W_NV - 
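+/* Usage sketch for VK_GOOGLE_display_timing (illustrative only; assumes a
+ * VkDevice `device`, a VkSwapchainKHR `swapchain`, and
+ * `pfnGetRefreshCycleDurationGOOGLE` loaded with vkGetDeviceProcAddr):
+ *
+ *     VkRefreshCycleDurationGOOGLE cycle;
+ *     if (pfnGetRefreshCycleDurationGOOGLE(device, swapchain, &cycle) == VK_SUCCESS) {
+ *         // cycle.refreshDuration is the display refresh period in nanoseconds,
+ *         // which can feed desiredPresentTime in VkPresentTimesInfoGOOGLE.
+ *     }
+ */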
VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_X_NV + 1), + VK_VIEWPORT_COORDINATE_SWIZZLE_MAX_ENUM_NV = 0x7FFFFFFF +} VkViewportCoordinateSwizzleNV; + +typedef VkFlags VkPipelineViewportSwizzleStateCreateFlagsNV; + +typedef struct VkViewportSwizzleNV { + VkViewportCoordinateSwizzleNV x; + VkViewportCoordinateSwizzleNV y; + VkViewportCoordinateSwizzleNV z; + VkViewportCoordinateSwizzleNV w; +} VkViewportSwizzleNV; + +typedef struct VkPipelineViewportSwizzleStateCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkPipelineViewportSwizzleStateCreateFlagsNV flags; + uint32_t viewportCount; + const VkViewportSwizzleNV* pViewportSwizzles; +} VkPipelineViewportSwizzleStateCreateInfoNV; + + + +#define VK_EXT_discard_rectangles 1 +#define VK_EXT_DISCARD_RECTANGLES_SPEC_VERSION 1 +#define VK_EXT_DISCARD_RECTANGLES_EXTENSION_NAME "VK_EXT_discard_rectangles" + + +typedef enum VkDiscardRectangleModeEXT { + VK_DISCARD_RECTANGLE_MODE_INCLUSIVE_EXT = 0, + VK_DISCARD_RECTANGLE_MODE_EXCLUSIVE_EXT = 1, + VK_DISCARD_RECTANGLE_MODE_BEGIN_RANGE_EXT = VK_DISCARD_RECTANGLE_MODE_INCLUSIVE_EXT, + VK_DISCARD_RECTANGLE_MODE_END_RANGE_EXT = VK_DISCARD_RECTANGLE_MODE_EXCLUSIVE_EXT, + VK_DISCARD_RECTANGLE_MODE_RANGE_SIZE_EXT = (VK_DISCARD_RECTANGLE_MODE_EXCLUSIVE_EXT - VK_DISCARD_RECTANGLE_MODE_INCLUSIVE_EXT + 1), + VK_DISCARD_RECTANGLE_MODE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDiscardRectangleModeEXT; + +typedef VkFlags VkPipelineDiscardRectangleStateCreateFlagsEXT; + +typedef struct VkPhysicalDeviceDiscardRectanglePropertiesEXT { + VkStructureType sType; + void* pNext; + uint32_t maxDiscardRectangles; +} VkPhysicalDeviceDiscardRectanglePropertiesEXT; + +typedef struct VkPipelineDiscardRectangleStateCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkPipelineDiscardRectangleStateCreateFlagsEXT flags; + VkDiscardRectangleModeEXT discardRectangleMode; + uint32_t discardRectangleCount; + const VkRect2D* pDiscardRectangles; +} VkPipelineDiscardRectangleStateCreateInfoEXT; + + +typedef void (VKAPI_PTR *PFN_vkCmdSetDiscardRectangleEXT)(VkCommandBuffer commandBuffer, uint32_t firstDiscardRectangle, uint32_t discardRectangleCount, const VkRect2D* pDiscardRectangles); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetDiscardRectangleEXT( + VkCommandBuffer commandBuffer, + uint32_t firstDiscardRectangle, + uint32_t discardRectangleCount, + const VkRect2D* pDiscardRectangles); +#endif + +#define VK_EXT_conservative_rasterization 1 +#define VK_EXT_CONSERVATIVE_RASTERIZATION_SPEC_VERSION 1 +#define VK_EXT_CONSERVATIVE_RASTERIZATION_EXTENSION_NAME "VK_EXT_conservative_rasterization" + + +typedef enum VkConservativeRasterizationModeEXT { + VK_CONSERVATIVE_RASTERIZATION_MODE_DISABLED_EXT = 0, + VK_CONSERVATIVE_RASTERIZATION_MODE_OVERESTIMATE_EXT = 1, + VK_CONSERVATIVE_RASTERIZATION_MODE_UNDERESTIMATE_EXT = 2, + VK_CONSERVATIVE_RASTERIZATION_MODE_BEGIN_RANGE_EXT = VK_CONSERVATIVE_RASTERIZATION_MODE_DISABLED_EXT, + VK_CONSERVATIVE_RASTERIZATION_MODE_END_RANGE_EXT = VK_CONSERVATIVE_RASTERIZATION_MODE_UNDERESTIMATE_EXT, + VK_CONSERVATIVE_RASTERIZATION_MODE_RANGE_SIZE_EXT = (VK_CONSERVATIVE_RASTERIZATION_MODE_UNDERESTIMATE_EXT - VK_CONSERVATIVE_RASTERIZATION_MODE_DISABLED_EXT + 1), + VK_CONSERVATIVE_RASTERIZATION_MODE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkConservativeRasterizationModeEXT; + +typedef VkFlags VkPipelineRasterizationConservativeStateCreateFlagsEXT; + +typedef struct VkPhysicalDeviceConservativeRasterizationPropertiesEXT { + VkStructureType sType; + void* pNext; + float primitiveOverestimationSize; + float 
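+/* Usage sketch for VK_EXT_discard_rectangles (illustrative only; assumes `cmd`
+ * is recording with a pipeline that declared a non-zero discardRectangleCount
+ * and the dynamic state VK_DYNAMIC_STATE_DISCARD_RECTANGLE_EXT, and that
+ * `pfnCmdSetDiscardRectangleEXT` was loaded with vkGetDeviceProcAddr):
+ *
+ *     VkRect2D rect = { { 0, 0 }, { 256, 256 } };
+ *     pfnCmdSetDiscardRectangleEXT(cmd, 0, 1, &rect);
+ */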
maxExtraPrimitiveOverestimationSize; + float extraPrimitiveOverestimationSizeGranularity; + VkBool32 primitiveUnderestimation; + VkBool32 conservativePointAndLineRasterization; + VkBool32 degenerateTrianglesRasterized; + VkBool32 degenerateLinesRasterized; + VkBool32 fullyCoveredFragmentShaderInputVariable; + VkBool32 conservativeRasterizationPostDepthCoverage; +} VkPhysicalDeviceConservativeRasterizationPropertiesEXT; + +typedef struct VkPipelineRasterizationConservativeStateCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkPipelineRasterizationConservativeStateCreateFlagsEXT flags; + VkConservativeRasterizationModeEXT conservativeRasterizationMode; + float extraPrimitiveOverestimationSize; +} VkPipelineRasterizationConservativeStateCreateInfoEXT; + + + +#define VK_EXT_swapchain_colorspace 1 +#define VK_EXT_SWAPCHAIN_COLOR_SPACE_SPEC_VERSION 3 +#define VK_EXT_SWAPCHAIN_COLOR_SPACE_EXTENSION_NAME "VK_EXT_swapchain_colorspace" + + +#define VK_EXT_hdr_metadata 1 +#define VK_EXT_HDR_METADATA_SPEC_VERSION 1 +#define VK_EXT_HDR_METADATA_EXTENSION_NAME "VK_EXT_hdr_metadata" + +typedef struct VkXYColorEXT { + float x; + float y; +} VkXYColorEXT; + +typedef struct VkHdrMetadataEXT { + VkStructureType sType; + const void* pNext; + VkXYColorEXT displayPrimaryRed; + VkXYColorEXT displayPrimaryGreen; + VkXYColorEXT displayPrimaryBlue; + VkXYColorEXT whitePoint; + float maxLuminance; + float minLuminance; + float maxContentLightLevel; + float maxFrameAverageLightLevel; +} VkHdrMetadataEXT; + + +typedef void (VKAPI_PTR *PFN_vkSetHdrMetadataEXT)(VkDevice device, uint32_t swapchainCount, const VkSwapchainKHR* pSwapchains, const VkHdrMetadataEXT* pMetadata); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkSetHdrMetadataEXT( + VkDevice device, + uint32_t swapchainCount, + const VkSwapchainKHR* pSwapchains, + const VkHdrMetadataEXT* pMetadata); +#endif + +#define VK_EXT_external_memory_dma_buf 1 +#define VK_EXT_EXTERNAL_MEMORY_DMA_BUF_SPEC_VERSION 1 +#define VK_EXT_EXTERNAL_MEMORY_DMA_BUF_EXTENSION_NAME "VK_EXT_external_memory_dma_buf" + + +#define VK_EXT_queue_family_foreign 1 +#define VK_EXT_QUEUE_FAMILY_FOREIGN_SPEC_VERSION 1 +#define VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME "VK_EXT_queue_family_foreign" +#define VK_QUEUE_FAMILY_FOREIGN_EXT (~0U-2) + + +#define VK_EXT_debug_utils 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDebugUtilsMessengerEXT) + +#define VK_EXT_DEBUG_UTILS_SPEC_VERSION 1 +#define VK_EXT_DEBUG_UTILS_EXTENSION_NAME "VK_EXT_debug_utils" + +typedef VkFlags VkDebugUtilsMessengerCallbackDataFlagsEXT; +typedef VkFlags VkDebugUtilsMessengerCreateFlagsEXT; + +typedef enum VkDebugUtilsMessageSeverityFlagBitsEXT { + VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT = 0x00000001, + VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT = 0x00000010, + VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT = 0x00000100, + VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT = 0x00001000, + VK_DEBUG_UTILS_MESSAGE_SEVERITY_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDebugUtilsMessageSeverityFlagBitsEXT; +typedef VkFlags VkDebugUtilsMessageSeverityFlagsEXT; + +typedef enum VkDebugUtilsMessageTypeFlagBitsEXT { + VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT = 0x00000001, + VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT = 0x00000002, + VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT = 0x00000004, + VK_DEBUG_UTILS_MESSAGE_TYPE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDebugUtilsMessageTypeFlagBitsEXT; +typedef VkFlags VkDebugUtilsMessageTypeFlagsEXT; + +typedef struct VkDebugUtilsObjectNameInfoEXT { + 
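+/* Usage sketch for VK_EXT_hdr_metadata (illustrative only; assumes a VkDevice
+ * `device`, a VkSwapchainKHR `swapchain` on an HDR10 surface, and
+ * `pfnSetHdrMetadataEXT` loaded with vkGetDeviceProcAddr; the constants are the
+ * usual BT.2020 primaries with a D65 white point and nit-denominated luminance):
+ *
+ *     VkHdrMetadataEXT hdr = { VK_STRUCTURE_TYPE_HDR_METADATA_EXT, NULL,
+ *         { 0.708f, 0.292f },     // displayPrimaryRed
+ *         { 0.170f, 0.797f },     // displayPrimaryGreen
+ *         { 0.131f, 0.046f },     // displayPrimaryBlue
+ *         { 0.3127f, 0.3290f },   // whitePoint
+ *         1000.0f, 0.001f,        // maxLuminance, minLuminance
+ *         1000.0f, 400.0f };      // maxContentLightLevel, maxFrameAverageLightLevel
+ *     pfnSetHdrMetadataEXT(device, 1, &swapchain, &hdr);
+ */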
VkStructureType sType; + const void* pNext; + VkObjectType objectType; + uint64_t objectHandle; + const char* pObjectName; +} VkDebugUtilsObjectNameInfoEXT; + +typedef struct VkDebugUtilsObjectTagInfoEXT { + VkStructureType sType; + const void* pNext; + VkObjectType objectType; + uint64_t objectHandle; + uint64_t tagName; + size_t tagSize; + const void* pTag; +} VkDebugUtilsObjectTagInfoEXT; + +typedef struct VkDebugUtilsLabelEXT { + VkStructureType sType; + const void* pNext; + const char* pLabelName; + float color[4]; +} VkDebugUtilsLabelEXT; + +typedef struct VkDebugUtilsMessengerCallbackDataEXT { + VkStructureType sType; + const void* pNext; + VkDebugUtilsMessengerCallbackDataFlagsEXT flags; + const char* pMessageIdName; + int32_t messageIdNumber; + const char* pMessage; + uint32_t queueLabelCount; + VkDebugUtilsLabelEXT* pQueueLabels; + uint32_t cmdBufLabelCount; + VkDebugUtilsLabelEXT* pCmdBufLabels; + uint32_t objectCount; + VkDebugUtilsObjectNameInfoEXT* pObjects; +} VkDebugUtilsMessengerCallbackDataEXT; + +typedef VkBool32 (VKAPI_PTR *PFN_vkDebugUtilsMessengerCallbackEXT)( + VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, + VkDebugUtilsMessageTypeFlagsEXT messageTypes, + const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData, + void* pUserData); + +typedef struct VkDebugUtilsMessengerCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkDebugUtilsMessengerCreateFlagsEXT flags; + VkDebugUtilsMessageSeverityFlagsEXT messageSeverity; + VkDebugUtilsMessageTypeFlagsEXT messageType; + PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback; + void* pUserData; +} VkDebugUtilsMessengerCreateInfoEXT; + + +typedef VkResult (VKAPI_PTR *PFN_vkSetDebugUtilsObjectNameEXT)(VkDevice device, const VkDebugUtilsObjectNameInfoEXT* pNameInfo); +typedef VkResult (VKAPI_PTR *PFN_vkSetDebugUtilsObjectTagEXT)(VkDevice device, const VkDebugUtilsObjectTagInfoEXT* pTagInfo); +typedef void (VKAPI_PTR *PFN_vkQueueBeginDebugUtilsLabelEXT)(VkQueue queue, const VkDebugUtilsLabelEXT* pLabelInfo); +typedef void (VKAPI_PTR *PFN_vkQueueEndDebugUtilsLabelEXT)(VkQueue queue); +typedef void (VKAPI_PTR *PFN_vkQueueInsertDebugUtilsLabelEXT)(VkQueue queue, const VkDebugUtilsLabelEXT* pLabelInfo); +typedef void (VKAPI_PTR *PFN_vkCmdBeginDebugUtilsLabelEXT)(VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT* pLabelInfo); +typedef void (VKAPI_PTR *PFN_vkCmdEndDebugUtilsLabelEXT)(VkCommandBuffer commandBuffer); +typedef void (VKAPI_PTR *PFN_vkCmdInsertDebugUtilsLabelEXT)(VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT* pLabelInfo); +typedef VkResult (VKAPI_PTR *PFN_vkCreateDebugUtilsMessengerEXT)(VkInstance instance, const VkDebugUtilsMessengerCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDebugUtilsMessengerEXT* pMessenger); +typedef void (VKAPI_PTR *PFN_vkDestroyDebugUtilsMessengerEXT)(VkInstance instance, VkDebugUtilsMessengerEXT messenger, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkSubmitDebugUtilsMessageEXT)(VkInstance instance, VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VkDebugUtilsMessageTypeFlagsEXT messageTypes, const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkSetDebugUtilsObjectNameEXT( + VkDevice device, + const VkDebugUtilsObjectNameInfoEXT* pNameInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkSetDebugUtilsObjectTagEXT( + VkDevice device, + const VkDebugUtilsObjectTagInfoEXT* pTagInfo); + +VKAPI_ATTR void VKAPI_CALL 
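+/* Usage sketch for VK_EXT_debug_utils (illustrative only; assumes a VkInstance
+ * `instance` created with this extension enabled and a callback `debugCallback`
+ * matching PFN_vkDebugUtilsMessengerCallbackEXT):
+ *
+ *     PFN_vkCreateDebugUtilsMessengerEXT pfnCreate =
+ *         (PFN_vkCreateDebugUtilsMessengerEXT)
+ *             vkGetInstanceProcAddr(instance, "vkCreateDebugUtilsMessengerEXT");
+ *     VkDebugUtilsMessengerCreateInfoEXT ci = {
+ *         VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT, NULL, 0,
+ *         VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT |
+ *             VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT,
+ *         VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT |
+ *             VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT,
+ *         debugCallback, NULL };
+ *     VkDebugUtilsMessengerEXT messenger;
+ *     VkResult res = pfnCreate(instance, &ci, NULL, &messenger);
+ */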
vkQueueBeginDebugUtilsLabelEXT( + VkQueue queue, + const VkDebugUtilsLabelEXT* pLabelInfo); + +VKAPI_ATTR void VKAPI_CALL vkQueueEndDebugUtilsLabelEXT( + VkQueue queue); + +VKAPI_ATTR void VKAPI_CALL vkQueueInsertDebugUtilsLabelEXT( + VkQueue queue, + const VkDebugUtilsLabelEXT* pLabelInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdBeginDebugUtilsLabelEXT( + VkCommandBuffer commandBuffer, + const VkDebugUtilsLabelEXT* pLabelInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdEndDebugUtilsLabelEXT( + VkCommandBuffer commandBuffer); + +VKAPI_ATTR void VKAPI_CALL vkCmdInsertDebugUtilsLabelEXT( + VkCommandBuffer commandBuffer, + const VkDebugUtilsLabelEXT* pLabelInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateDebugUtilsMessengerEXT( + VkInstance instance, + const VkDebugUtilsMessengerCreateInfoEXT* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkDebugUtilsMessengerEXT* pMessenger); + +VKAPI_ATTR void VKAPI_CALL vkDestroyDebugUtilsMessengerEXT( + VkInstance instance, + VkDebugUtilsMessengerEXT messenger, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR void VKAPI_CALL vkSubmitDebugUtilsMessageEXT( + VkInstance instance, + VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, + VkDebugUtilsMessageTypeFlagsEXT messageTypes, + const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData); +#endif + +#define VK_EXT_sampler_filter_minmax 1 +#define VK_EXT_SAMPLER_FILTER_MINMAX_SPEC_VERSION 1 +#define VK_EXT_SAMPLER_FILTER_MINMAX_EXTENSION_NAME "VK_EXT_sampler_filter_minmax" + + +typedef enum VkSamplerReductionModeEXT { + VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE_EXT = 0, + VK_SAMPLER_REDUCTION_MODE_MIN_EXT = 1, + VK_SAMPLER_REDUCTION_MODE_MAX_EXT = 2, + VK_SAMPLER_REDUCTION_MODE_BEGIN_RANGE_EXT = VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE_EXT, + VK_SAMPLER_REDUCTION_MODE_END_RANGE_EXT = VK_SAMPLER_REDUCTION_MODE_MAX_EXT, + VK_SAMPLER_REDUCTION_MODE_RANGE_SIZE_EXT = (VK_SAMPLER_REDUCTION_MODE_MAX_EXT - VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE_EXT + 1), + VK_SAMPLER_REDUCTION_MODE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkSamplerReductionModeEXT; + +typedef struct VkSamplerReductionModeCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkSamplerReductionModeEXT reductionMode; +} VkSamplerReductionModeCreateInfoEXT; + +typedef struct VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT { + VkStructureType sType; + void* pNext; + VkBool32 filterMinmaxSingleComponentFormats; + VkBool32 filterMinmaxImageComponentMapping; +} VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT; + + + +#define VK_AMD_gpu_shader_int16 1 +#define VK_AMD_GPU_SHADER_INT16_SPEC_VERSION 1 +#define VK_AMD_GPU_SHADER_INT16_EXTENSION_NAME "VK_AMD_gpu_shader_int16" + + +#define VK_AMD_mixed_attachment_samples 1 +#define VK_AMD_MIXED_ATTACHMENT_SAMPLES_SPEC_VERSION 1 +#define VK_AMD_MIXED_ATTACHMENT_SAMPLES_EXTENSION_NAME "VK_AMD_mixed_attachment_samples" + + +#define VK_AMD_shader_fragment_mask 1 +#define VK_AMD_SHADER_FRAGMENT_MASK_SPEC_VERSION 1 +#define VK_AMD_SHADER_FRAGMENT_MASK_EXTENSION_NAME "VK_AMD_shader_fragment_mask" + + +#define VK_EXT_inline_uniform_block 1 +#define VK_EXT_INLINE_UNIFORM_BLOCK_SPEC_VERSION 1 +#define VK_EXT_INLINE_UNIFORM_BLOCK_EXTENSION_NAME "VK_EXT_inline_uniform_block" + +typedef struct VkPhysicalDeviceInlineUniformBlockFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 inlineUniformBlock; + VkBool32 descriptorBindingInlineUniformBlockUpdateAfterBind; +} VkPhysicalDeviceInlineUniformBlockFeaturesEXT; + +typedef struct VkPhysicalDeviceInlineUniformBlockPropertiesEXT { + VkStructureType 
sType; + void* pNext; + uint32_t maxInlineUniformBlockSize; + uint32_t maxPerStageDescriptorInlineUniformBlocks; + uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks; + uint32_t maxDescriptorSetInlineUniformBlocks; + uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks; +} VkPhysicalDeviceInlineUniformBlockPropertiesEXT; + +typedef struct VkWriteDescriptorSetInlineUniformBlockEXT { + VkStructureType sType; + const void* pNext; + uint32_t dataSize; + const void* pData; +} VkWriteDescriptorSetInlineUniformBlockEXT; + +typedef struct VkDescriptorPoolInlineUniformBlockCreateInfoEXT { + VkStructureType sType; + const void* pNext; + uint32_t maxInlineUniformBlockBindings; +} VkDescriptorPoolInlineUniformBlockCreateInfoEXT; + + + +#define VK_EXT_shader_stencil_export 1 +#define VK_EXT_SHADER_STENCIL_EXPORT_SPEC_VERSION 1 +#define VK_EXT_SHADER_STENCIL_EXPORT_EXTENSION_NAME "VK_EXT_shader_stencil_export" + + +#define VK_EXT_sample_locations 1 +#define VK_EXT_SAMPLE_LOCATIONS_SPEC_VERSION 1 +#define VK_EXT_SAMPLE_LOCATIONS_EXTENSION_NAME "VK_EXT_sample_locations" + +typedef struct VkSampleLocationEXT { + float x; + float y; +} VkSampleLocationEXT; + +typedef struct VkSampleLocationsInfoEXT { + VkStructureType sType; + const void* pNext; + VkSampleCountFlagBits sampleLocationsPerPixel; + VkExtent2D sampleLocationGridSize; + uint32_t sampleLocationsCount; + const VkSampleLocationEXT* pSampleLocations; +} VkSampleLocationsInfoEXT; + +typedef struct VkAttachmentSampleLocationsEXT { + uint32_t attachmentIndex; + VkSampleLocationsInfoEXT sampleLocationsInfo; +} VkAttachmentSampleLocationsEXT; + +typedef struct VkSubpassSampleLocationsEXT { + uint32_t subpassIndex; + VkSampleLocationsInfoEXT sampleLocationsInfo; +} VkSubpassSampleLocationsEXT; + +typedef struct VkRenderPassSampleLocationsBeginInfoEXT { + VkStructureType sType; + const void* pNext; + uint32_t attachmentInitialSampleLocationsCount; + const VkAttachmentSampleLocationsEXT* pAttachmentInitialSampleLocations; + uint32_t postSubpassSampleLocationsCount; + const VkSubpassSampleLocationsEXT* pPostSubpassSampleLocations; +} VkRenderPassSampleLocationsBeginInfoEXT; + +typedef struct VkPipelineSampleLocationsStateCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkBool32 sampleLocationsEnable; + VkSampleLocationsInfoEXT sampleLocationsInfo; +} VkPipelineSampleLocationsStateCreateInfoEXT; + +typedef struct VkPhysicalDeviceSampleLocationsPropertiesEXT { + VkStructureType sType; + void* pNext; + VkSampleCountFlags sampleLocationSampleCounts; + VkExtent2D maxSampleLocationGridSize; + float sampleLocationCoordinateRange[2]; + uint32_t sampleLocationSubPixelBits; + VkBool32 variableSampleLocations; +} VkPhysicalDeviceSampleLocationsPropertiesEXT; + +typedef struct VkMultisamplePropertiesEXT { + VkStructureType sType; + void* pNext; + VkExtent2D maxSampleLocationGridSize; +} VkMultisamplePropertiesEXT; + + +typedef void (VKAPI_PTR *PFN_vkCmdSetSampleLocationsEXT)(VkCommandBuffer commandBuffer, const VkSampleLocationsInfoEXT* pSampleLocationsInfo); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT)(VkPhysicalDevice physicalDevice, VkSampleCountFlagBits samples, VkMultisamplePropertiesEXT* pMultisampleProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetSampleLocationsEXT( + VkCommandBuffer commandBuffer, + const VkSampleLocationsInfoEXT* pSampleLocationsInfo); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceMultisamplePropertiesEXT( + VkPhysicalDevice physicalDevice, + 
VkSampleCountFlagBits samples, + VkMultisamplePropertiesEXT* pMultisampleProperties); +#endif + +#define VK_EXT_blend_operation_advanced 1 +#define VK_EXT_BLEND_OPERATION_ADVANCED_SPEC_VERSION 2 +#define VK_EXT_BLEND_OPERATION_ADVANCED_EXTENSION_NAME "VK_EXT_blend_operation_advanced" + + +typedef enum VkBlendOverlapEXT { + VK_BLEND_OVERLAP_UNCORRELATED_EXT = 0, + VK_BLEND_OVERLAP_DISJOINT_EXT = 1, + VK_BLEND_OVERLAP_CONJOINT_EXT = 2, + VK_BLEND_OVERLAP_BEGIN_RANGE_EXT = VK_BLEND_OVERLAP_UNCORRELATED_EXT, + VK_BLEND_OVERLAP_END_RANGE_EXT = VK_BLEND_OVERLAP_CONJOINT_EXT, + VK_BLEND_OVERLAP_RANGE_SIZE_EXT = (VK_BLEND_OVERLAP_CONJOINT_EXT - VK_BLEND_OVERLAP_UNCORRELATED_EXT + 1), + VK_BLEND_OVERLAP_MAX_ENUM_EXT = 0x7FFFFFFF +} VkBlendOverlapEXT; + +typedef struct VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 advancedBlendCoherentOperations; +} VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT; + +typedef struct VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT { + VkStructureType sType; + void* pNext; + uint32_t advancedBlendMaxColorAttachments; + VkBool32 advancedBlendIndependentBlend; + VkBool32 advancedBlendNonPremultipliedSrcColor; + VkBool32 advancedBlendNonPremultipliedDstColor; + VkBool32 advancedBlendCorrelatedOverlap; + VkBool32 advancedBlendAllOperations; +} VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT; + +typedef struct VkPipelineColorBlendAdvancedStateCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkBool32 srcPremultiplied; + VkBool32 dstPremultiplied; + VkBlendOverlapEXT blendOverlap; +} VkPipelineColorBlendAdvancedStateCreateInfoEXT; + + + +#define VK_NV_fragment_coverage_to_color 1 +#define VK_NV_FRAGMENT_COVERAGE_TO_COLOR_SPEC_VERSION 1 +#define VK_NV_FRAGMENT_COVERAGE_TO_COLOR_EXTENSION_NAME "VK_NV_fragment_coverage_to_color" + +typedef VkFlags VkPipelineCoverageToColorStateCreateFlagsNV; + +typedef struct VkPipelineCoverageToColorStateCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkPipelineCoverageToColorStateCreateFlagsNV flags; + VkBool32 coverageToColorEnable; + uint32_t coverageToColorLocation; +} VkPipelineCoverageToColorStateCreateInfoNV; + + + +#define VK_NV_framebuffer_mixed_samples 1 +#define VK_NV_FRAMEBUFFER_MIXED_SAMPLES_SPEC_VERSION 1 +#define VK_NV_FRAMEBUFFER_MIXED_SAMPLES_EXTENSION_NAME "VK_NV_framebuffer_mixed_samples" + + +typedef enum VkCoverageModulationModeNV { + VK_COVERAGE_MODULATION_MODE_NONE_NV = 0, + VK_COVERAGE_MODULATION_MODE_RGB_NV = 1, + VK_COVERAGE_MODULATION_MODE_ALPHA_NV = 2, + VK_COVERAGE_MODULATION_MODE_RGBA_NV = 3, + VK_COVERAGE_MODULATION_MODE_BEGIN_RANGE_NV = VK_COVERAGE_MODULATION_MODE_NONE_NV, + VK_COVERAGE_MODULATION_MODE_END_RANGE_NV = VK_COVERAGE_MODULATION_MODE_RGBA_NV, + VK_COVERAGE_MODULATION_MODE_RANGE_SIZE_NV = (VK_COVERAGE_MODULATION_MODE_RGBA_NV - VK_COVERAGE_MODULATION_MODE_NONE_NV + 1), + VK_COVERAGE_MODULATION_MODE_MAX_ENUM_NV = 0x7FFFFFFF +} VkCoverageModulationModeNV; + +typedef VkFlags VkPipelineCoverageModulationStateCreateFlagsNV; + +typedef struct VkPipelineCoverageModulationStateCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkPipelineCoverageModulationStateCreateFlagsNV flags; + VkCoverageModulationModeNV coverageModulationMode; + VkBool32 coverageModulationTableEnable; + uint32_t coverageModulationTableCount; + const float* pCoverageModulationTable; +} VkPipelineCoverageModulationStateCreateInfoNV; + + + +#define VK_NV_fill_rectangle 1 +#define VK_NV_FILL_RECTANGLE_SPEC_VERSION 1 +#define 
VK_NV_FILL_RECTANGLE_EXTENSION_NAME "VK_NV_fill_rectangle" + + +#define VK_EXT_post_depth_coverage 1 +#define VK_EXT_POST_DEPTH_COVERAGE_SPEC_VERSION 1 +#define VK_EXT_POST_DEPTH_COVERAGE_EXTENSION_NAME "VK_EXT_post_depth_coverage" + + +#define VK_EXT_image_drm_format_modifier 1 +#define VK_EXT_EXTENSION_159_SPEC_VERSION 0 +#define VK_EXT_EXTENSION_159_EXTENSION_NAME "VK_EXT_extension_159" +#define VK_EXT_IMAGE_DRM_FORMAT_MODIFIER_SPEC_VERSION 1 +#define VK_EXT_IMAGE_DRM_FORMAT_MODIFIER_EXTENSION_NAME "VK_EXT_image_drm_format_modifier" + +typedef struct VkDrmFormatModifierPropertiesEXT { + uint64_t drmFormatModifier; + uint32_t drmFormatModifierPlaneCount; + VkFormatFeatureFlags drmFormatModifierTilingFeatures; +} VkDrmFormatModifierPropertiesEXT; + +typedef struct VkDrmFormatModifierPropertiesListEXT { + VkStructureType sType; + void* pNext; + uint32_t drmFormatModifierCount; + VkDrmFormatModifierPropertiesEXT* pDrmFormatModifierProperties; +} VkDrmFormatModifierPropertiesListEXT; + +typedef struct VkPhysicalDeviceImageDrmFormatModifierInfoEXT { + VkStructureType sType; + const void* pNext; + uint64_t drmFormatModifier; + VkSharingMode sharingMode; + uint32_t queueFamilyIndexCount; + const uint32_t* pQueueFamilyIndices; +} VkPhysicalDeviceImageDrmFormatModifierInfoEXT; + +typedef struct VkImageDrmFormatModifierListCreateInfoEXT { + VkStructureType sType; + const void* pNext; + uint32_t drmFormatModifierCount; + const uint64_t* pDrmFormatModifiers; +} VkImageDrmFormatModifierListCreateInfoEXT; + +typedef struct VkImageDrmFormatModifierExplicitCreateInfoEXT { + VkStructureType sType; + const void* pNext; + uint64_t drmFormatModifier; + uint32_t drmFormatModifierPlaneCount; + const VkSubresourceLayout* pPlaneLayouts; +} VkImageDrmFormatModifierExplicitCreateInfoEXT; + +typedef struct VkImageDrmFormatModifierPropertiesEXT { + VkStructureType sType; + void* pNext; + uint64_t drmFormatModifier; +} VkImageDrmFormatModifierPropertiesEXT; + + +typedef VkResult (VKAPI_PTR *PFN_vkGetImageDrmFormatModifierPropertiesEXT)(VkDevice device, VkImage image, VkImageDrmFormatModifierPropertiesEXT* pProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetImageDrmFormatModifierPropertiesEXT( + VkDevice device, + VkImage image, + VkImageDrmFormatModifierPropertiesEXT* pProperties); +#endif + +#define VK_EXT_validation_cache 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkValidationCacheEXT) + +#define VK_EXT_VALIDATION_CACHE_SPEC_VERSION 1 +#define VK_EXT_VALIDATION_CACHE_EXTENSION_NAME "VK_EXT_validation_cache" + + +typedef enum VkValidationCacheHeaderVersionEXT { + VK_VALIDATION_CACHE_HEADER_VERSION_ONE_EXT = 1, + VK_VALIDATION_CACHE_HEADER_VERSION_BEGIN_RANGE_EXT = VK_VALIDATION_CACHE_HEADER_VERSION_ONE_EXT, + VK_VALIDATION_CACHE_HEADER_VERSION_END_RANGE_EXT = VK_VALIDATION_CACHE_HEADER_VERSION_ONE_EXT, + VK_VALIDATION_CACHE_HEADER_VERSION_RANGE_SIZE_EXT = (VK_VALIDATION_CACHE_HEADER_VERSION_ONE_EXT - VK_VALIDATION_CACHE_HEADER_VERSION_ONE_EXT + 1), + VK_VALIDATION_CACHE_HEADER_VERSION_MAX_ENUM_EXT = 0x7FFFFFFF +} VkValidationCacheHeaderVersionEXT; + +typedef VkFlags VkValidationCacheCreateFlagsEXT; + +typedef struct VkValidationCacheCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkValidationCacheCreateFlagsEXT flags; + size_t initialDataSize; + const void* pInitialData; +} VkValidationCacheCreateInfoEXT; + +typedef struct VkShaderModuleValidationCacheCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkValidationCacheEXT validationCache; +} 
VkShaderModuleValidationCacheCreateInfoEXT; + + +typedef VkResult (VKAPI_PTR *PFN_vkCreateValidationCacheEXT)(VkDevice device, const VkValidationCacheCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkValidationCacheEXT* pValidationCache); +typedef void (VKAPI_PTR *PFN_vkDestroyValidationCacheEXT)(VkDevice device, VkValidationCacheEXT validationCache, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkMergeValidationCachesEXT)(VkDevice device, VkValidationCacheEXT dstCache, uint32_t srcCacheCount, const VkValidationCacheEXT* pSrcCaches); +typedef VkResult (VKAPI_PTR *PFN_vkGetValidationCacheDataEXT)(VkDevice device, VkValidationCacheEXT validationCache, size_t* pDataSize, void* pData); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateValidationCacheEXT( + VkDevice device, + const VkValidationCacheCreateInfoEXT* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkValidationCacheEXT* pValidationCache); + +VKAPI_ATTR void VKAPI_CALL vkDestroyValidationCacheEXT( + VkDevice device, + VkValidationCacheEXT validationCache, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkMergeValidationCachesEXT( + VkDevice device, + VkValidationCacheEXT dstCache, + uint32_t srcCacheCount, + const VkValidationCacheEXT* pSrcCaches); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetValidationCacheDataEXT( + VkDevice device, + VkValidationCacheEXT validationCache, + size_t* pDataSize, + void* pData); +#endif + +#define VK_EXT_descriptor_indexing 1 +#define VK_EXT_DESCRIPTOR_INDEXING_SPEC_VERSION 2 +#define VK_EXT_DESCRIPTOR_INDEXING_EXTENSION_NAME "VK_EXT_descriptor_indexing" + + +typedef enum VkDescriptorBindingFlagBitsEXT { + VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT = 0x00000001, + VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT_EXT = 0x00000002, + VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT_EXT = 0x00000004, + VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT_EXT = 0x00000008, + VK_DESCRIPTOR_BINDING_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDescriptorBindingFlagBitsEXT; +typedef VkFlags VkDescriptorBindingFlagsEXT; + +typedef struct VkDescriptorSetLayoutBindingFlagsCreateInfoEXT { + VkStructureType sType; + const void* pNext; + uint32_t bindingCount; + const VkDescriptorBindingFlagsEXT* pBindingFlags; +} VkDescriptorSetLayoutBindingFlagsCreateInfoEXT; + +typedef struct VkPhysicalDeviceDescriptorIndexingFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 shaderInputAttachmentArrayDynamicIndexing; + VkBool32 shaderUniformTexelBufferArrayDynamicIndexing; + VkBool32 shaderStorageTexelBufferArrayDynamicIndexing; + VkBool32 shaderUniformBufferArrayNonUniformIndexing; + VkBool32 shaderSampledImageArrayNonUniformIndexing; + VkBool32 shaderStorageBufferArrayNonUniformIndexing; + VkBool32 shaderStorageImageArrayNonUniformIndexing; + VkBool32 shaderInputAttachmentArrayNonUniformIndexing; + VkBool32 shaderUniformTexelBufferArrayNonUniformIndexing; + VkBool32 shaderStorageTexelBufferArrayNonUniformIndexing; + VkBool32 descriptorBindingUniformBufferUpdateAfterBind; + VkBool32 descriptorBindingSampledImageUpdateAfterBind; + VkBool32 descriptorBindingStorageImageUpdateAfterBind; + VkBool32 descriptorBindingStorageBufferUpdateAfterBind; + VkBool32 descriptorBindingUniformTexelBufferUpdateAfterBind; + VkBool32 descriptorBindingStorageTexelBufferUpdateAfterBind; + VkBool32 descriptorBindingUpdateUnusedWhilePending; + VkBool32 descriptorBindingPartiallyBound; + VkBool32 descriptorBindingVariableDescriptorCount; + 
VkBool32 runtimeDescriptorArray; +} VkPhysicalDeviceDescriptorIndexingFeaturesEXT; + +typedef struct VkPhysicalDeviceDescriptorIndexingPropertiesEXT { + VkStructureType sType; + void* pNext; + uint32_t maxUpdateAfterBindDescriptorsInAllPools; + VkBool32 shaderUniformBufferArrayNonUniformIndexingNative; + VkBool32 shaderSampledImageArrayNonUniformIndexingNative; + VkBool32 shaderStorageBufferArrayNonUniformIndexingNative; + VkBool32 shaderStorageImageArrayNonUniformIndexingNative; + VkBool32 shaderInputAttachmentArrayNonUniformIndexingNative; + VkBool32 robustBufferAccessUpdateAfterBind; + VkBool32 quadDivergentImplicitLod; + uint32_t maxPerStageDescriptorUpdateAfterBindSamplers; + uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers; + uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers; + uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages; + uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages; + uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments; + uint32_t maxPerStageUpdateAfterBindResources; + uint32_t maxDescriptorSetUpdateAfterBindSamplers; + uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers; + uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic; + uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers; + uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic; + uint32_t maxDescriptorSetUpdateAfterBindSampledImages; + uint32_t maxDescriptorSetUpdateAfterBindStorageImages; + uint32_t maxDescriptorSetUpdateAfterBindInputAttachments; +} VkPhysicalDeviceDescriptorIndexingPropertiesEXT; + +typedef struct VkDescriptorSetVariableDescriptorCountAllocateInfoEXT { + VkStructureType sType; + const void* pNext; + uint32_t descriptorSetCount; + const uint32_t* pDescriptorCounts; +} VkDescriptorSetVariableDescriptorCountAllocateInfoEXT; + +typedef struct VkDescriptorSetVariableDescriptorCountLayoutSupportEXT { + VkStructureType sType; + void* pNext; + uint32_t maxVariableDescriptorCount; +} VkDescriptorSetVariableDescriptorCountLayoutSupportEXT; + + + +#define VK_EXT_shader_viewport_index_layer 1 +#define VK_EXT_SHADER_VIEWPORT_INDEX_LAYER_SPEC_VERSION 1 +#define VK_EXT_SHADER_VIEWPORT_INDEX_LAYER_EXTENSION_NAME "VK_EXT_shader_viewport_index_layer" + + +#define VK_NV_shading_rate_image 1 +#define VK_NV_SHADING_RATE_IMAGE_SPEC_VERSION 3 +#define VK_NV_SHADING_RATE_IMAGE_EXTENSION_NAME "VK_NV_shading_rate_image" + + +typedef enum VkShadingRatePaletteEntryNV { + VK_SHADING_RATE_PALETTE_ENTRY_NO_INVOCATIONS_NV = 0, + VK_SHADING_RATE_PALETTE_ENTRY_16_INVOCATIONS_PER_PIXEL_NV = 1, + VK_SHADING_RATE_PALETTE_ENTRY_8_INVOCATIONS_PER_PIXEL_NV = 2, + VK_SHADING_RATE_PALETTE_ENTRY_4_INVOCATIONS_PER_PIXEL_NV = 3, + VK_SHADING_RATE_PALETTE_ENTRY_2_INVOCATIONS_PER_PIXEL_NV = 4, + VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_PIXEL_NV = 5, + VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X1_PIXELS_NV = 6, + VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_1X2_PIXELS_NV = 7, + VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X2_PIXELS_NV = 8, + VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X2_PIXELS_NV = 9, + VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X4_PIXELS_NV = 10, + VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X4_PIXELS_NV = 11, + VK_SHADING_RATE_PALETTE_ENTRY_BEGIN_RANGE_NV = VK_SHADING_RATE_PALETTE_ENTRY_NO_INVOCATIONS_NV, + VK_SHADING_RATE_PALETTE_ENTRY_END_RANGE_NV = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X4_PIXELS_NV, + VK_SHADING_RATE_PALETTE_ENTRY_RANGE_SIZE_NV = 
(VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X4_PIXELS_NV - VK_SHADING_RATE_PALETTE_ENTRY_NO_INVOCATIONS_NV + 1), + VK_SHADING_RATE_PALETTE_ENTRY_MAX_ENUM_NV = 0x7FFFFFFF +} VkShadingRatePaletteEntryNV; + +typedef enum VkCoarseSampleOrderTypeNV { + VK_COARSE_SAMPLE_ORDER_TYPE_DEFAULT_NV = 0, + VK_COARSE_SAMPLE_ORDER_TYPE_CUSTOM_NV = 1, + VK_COARSE_SAMPLE_ORDER_TYPE_PIXEL_MAJOR_NV = 2, + VK_COARSE_SAMPLE_ORDER_TYPE_SAMPLE_MAJOR_NV = 3, + VK_COARSE_SAMPLE_ORDER_TYPE_BEGIN_RANGE_NV = VK_COARSE_SAMPLE_ORDER_TYPE_DEFAULT_NV, + VK_COARSE_SAMPLE_ORDER_TYPE_END_RANGE_NV = VK_COARSE_SAMPLE_ORDER_TYPE_SAMPLE_MAJOR_NV, + VK_COARSE_SAMPLE_ORDER_TYPE_RANGE_SIZE_NV = (VK_COARSE_SAMPLE_ORDER_TYPE_SAMPLE_MAJOR_NV - VK_COARSE_SAMPLE_ORDER_TYPE_DEFAULT_NV + 1), + VK_COARSE_SAMPLE_ORDER_TYPE_MAX_ENUM_NV = 0x7FFFFFFF +} VkCoarseSampleOrderTypeNV; + +typedef struct VkShadingRatePaletteNV { + uint32_t shadingRatePaletteEntryCount; + const VkShadingRatePaletteEntryNV* pShadingRatePaletteEntries; +} VkShadingRatePaletteNV; + +typedef struct VkPipelineViewportShadingRateImageStateCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkBool32 shadingRateImageEnable; + uint32_t viewportCount; + const VkShadingRatePaletteNV* pShadingRatePalettes; +} VkPipelineViewportShadingRateImageStateCreateInfoNV; + +typedef struct VkPhysicalDeviceShadingRateImageFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 shadingRateImage; + VkBool32 shadingRateCoarseSampleOrder; +} VkPhysicalDeviceShadingRateImageFeaturesNV; + +typedef struct VkPhysicalDeviceShadingRateImagePropertiesNV { + VkStructureType sType; + void* pNext; + VkExtent2D shadingRateTexelSize; + uint32_t shadingRatePaletteSize; + uint32_t shadingRateMaxCoarseSamples; +} VkPhysicalDeviceShadingRateImagePropertiesNV; + +typedef struct VkCoarseSampleLocationNV { + uint32_t pixelX; + uint32_t pixelY; + uint32_t sample; +} VkCoarseSampleLocationNV; + +typedef struct VkCoarseSampleOrderCustomNV { + VkShadingRatePaletteEntryNV shadingRate; + uint32_t sampleCount; + uint32_t sampleLocationCount; + const VkCoarseSampleLocationNV* pSampleLocations; +} VkCoarseSampleOrderCustomNV; + +typedef struct VkPipelineViewportCoarseSampleOrderStateCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkCoarseSampleOrderTypeNV sampleOrderType; + uint32_t customSampleOrderCount; + const VkCoarseSampleOrderCustomNV* pCustomSampleOrders; +} VkPipelineViewportCoarseSampleOrderStateCreateInfoNV; + + +typedef void (VKAPI_PTR *PFN_vkCmdBindShadingRateImageNV)(VkCommandBuffer commandBuffer, VkImageView imageView, VkImageLayout imageLayout); +typedef void (VKAPI_PTR *PFN_vkCmdSetViewportShadingRatePaletteNV)(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkShadingRatePaletteNV* pShadingRatePalettes); +typedef void (VKAPI_PTR *PFN_vkCmdSetCoarseSampleOrderNV)(VkCommandBuffer commandBuffer, VkCoarseSampleOrderTypeNV sampleOrderType, uint32_t customSampleOrderCount, const VkCoarseSampleOrderCustomNV* pCustomSampleOrders); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdBindShadingRateImageNV( + VkCommandBuffer commandBuffer, + VkImageView imageView, + VkImageLayout imageLayout); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetViewportShadingRatePaletteNV( + VkCommandBuffer commandBuffer, + uint32_t firstViewport, + uint32_t viewportCount, + const VkShadingRatePaletteNV* pShadingRatePalettes); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetCoarseSampleOrderNV( + VkCommandBuffer commandBuffer, + VkCoarseSampleOrderTypeNV sampleOrderType, 
+ uint32_t customSampleOrderCount, + const VkCoarseSampleOrderCustomNV* pCustomSampleOrders); +#endif + +#define VK_NV_ray_tracing 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkAccelerationStructureNV) + +#define VK_NV_RAY_TRACING_SPEC_VERSION 2 +#define VK_NV_RAY_TRACING_EXTENSION_NAME "VK_NV_ray_tracing" +#define VK_SHADER_UNUSED_NV (~0U) + + +typedef enum VkRayTracingShaderGroupTypeNV { + VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_NV = 0, + VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_NV = 1, + VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_NV = 2, + VK_RAY_TRACING_SHADER_GROUP_TYPE_BEGIN_RANGE_NV = VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_NV, + VK_RAY_TRACING_SHADER_GROUP_TYPE_END_RANGE_NV = VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_NV, + VK_RAY_TRACING_SHADER_GROUP_TYPE_RANGE_SIZE_NV = (VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_NV - VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_NV + 1), + VK_RAY_TRACING_SHADER_GROUP_TYPE_MAX_ENUM_NV = 0x7FFFFFFF +} VkRayTracingShaderGroupTypeNV; + +typedef enum VkGeometryTypeNV { + VK_GEOMETRY_TYPE_TRIANGLES_NV = 0, + VK_GEOMETRY_TYPE_AABBS_NV = 1, + VK_GEOMETRY_TYPE_BEGIN_RANGE_NV = VK_GEOMETRY_TYPE_TRIANGLES_NV, + VK_GEOMETRY_TYPE_END_RANGE_NV = VK_GEOMETRY_TYPE_AABBS_NV, + VK_GEOMETRY_TYPE_RANGE_SIZE_NV = (VK_GEOMETRY_TYPE_AABBS_NV - VK_GEOMETRY_TYPE_TRIANGLES_NV + 1), + VK_GEOMETRY_TYPE_MAX_ENUM_NV = 0x7FFFFFFF +} VkGeometryTypeNV; + +typedef enum VkAccelerationStructureTypeNV { + VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_NV = 0, + VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV = 1, + VK_ACCELERATION_STRUCTURE_TYPE_BEGIN_RANGE_NV = VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_NV, + VK_ACCELERATION_STRUCTURE_TYPE_END_RANGE_NV = VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV, + VK_ACCELERATION_STRUCTURE_TYPE_RANGE_SIZE_NV = (VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV - VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_NV + 1), + VK_ACCELERATION_STRUCTURE_TYPE_MAX_ENUM_NV = 0x7FFFFFFF +} VkAccelerationStructureTypeNV; + +typedef enum VkCopyAccelerationStructureModeNV { + VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_NV = 0, + VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_NV = 1, + VK_COPY_ACCELERATION_STRUCTURE_MODE_BEGIN_RANGE_NV = VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_NV, + VK_COPY_ACCELERATION_STRUCTURE_MODE_END_RANGE_NV = VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_NV, + VK_COPY_ACCELERATION_STRUCTURE_MODE_RANGE_SIZE_NV = (VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_NV - VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_NV + 1), + VK_COPY_ACCELERATION_STRUCTURE_MODE_MAX_ENUM_NV = 0x7FFFFFFF +} VkCopyAccelerationStructureModeNV; + +typedef enum VkAccelerationStructureMemoryRequirementsTypeNV { + VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV = 0, + VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV = 1, + VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV = 2, + VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BEGIN_RANGE_NV = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV, + VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_END_RANGE_NV = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV, + VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_RANGE_SIZE_NV = (VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV - VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV + 1), + VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_MAX_ENUM_NV = 0x7FFFFFFF +} VkAccelerationStructureMemoryRequirementsTypeNV; + + +typedef enum VkGeometryFlagBitsNV 
{ + VK_GEOMETRY_OPAQUE_BIT_NV = 0x00000001, + VK_GEOMETRY_NO_DUPLICATE_ANY_HIT_INVOCATION_BIT_NV = 0x00000002, + VK_GEOMETRY_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF +} VkGeometryFlagBitsNV; +typedef VkFlags VkGeometryFlagsNV; + +typedef enum VkGeometryInstanceFlagBitsNV { + VK_GEOMETRY_INSTANCE_TRIANGLE_CULL_DISABLE_BIT_NV = 0x00000001, + VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_NV = 0x00000002, + VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_NV = 0x00000004, + VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_NV = 0x00000008, + VK_GEOMETRY_INSTANCE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF +} VkGeometryInstanceFlagBitsNV; +typedef VkFlags VkGeometryInstanceFlagsNV; + +typedef enum VkBuildAccelerationStructureFlagBitsNV { + VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_NV = 0x00000001, + VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_NV = 0x00000002, + VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_NV = 0x00000004, + VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_NV = 0x00000008, + VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_NV = 0x00000010, + VK_BUILD_ACCELERATION_STRUCTURE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF +} VkBuildAccelerationStructureFlagBitsNV; +typedef VkFlags VkBuildAccelerationStructureFlagsNV; + +typedef struct VkRayTracingShaderGroupCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkRayTracingShaderGroupTypeNV type; + uint32_t generalShader; + uint32_t closestHitShader; + uint32_t anyHitShader; + uint32_t intersectionShader; +} VkRayTracingShaderGroupCreateInfoNV; + +typedef struct VkRayTracingPipelineCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkPipelineCreateFlags flags; + uint32_t stageCount; + const VkPipelineShaderStageCreateInfo* pStages; + uint32_t groupCount; + const VkRayTracingShaderGroupCreateInfoNV* pGroups; + uint32_t maxRecursionDepth; + VkPipelineLayout layout; + VkPipeline basePipelineHandle; + int32_t basePipelineIndex; +} VkRayTracingPipelineCreateInfoNV; + +typedef struct VkGeometryTrianglesNV { + VkStructureType sType; + const void* pNext; + VkBuffer vertexData; + VkDeviceSize vertexOffset; + uint32_t vertexCount; + VkDeviceSize vertexStride; + VkFormat vertexFormat; + VkBuffer indexData; + VkDeviceSize indexOffset; + uint32_t indexCount; + VkIndexType indexType; + VkBuffer transformData; + VkDeviceSize transformOffset; +} VkGeometryTrianglesNV; + +typedef struct VkGeometryAABBNV { + VkStructureType sType; + const void* pNext; + VkBuffer aabbData; + uint32_t numAABBs; + uint32_t stride; + VkDeviceSize offset; +} VkGeometryAABBNV; + +typedef struct VkGeometryDataNV { + VkGeometryTrianglesNV triangles; + VkGeometryAABBNV aabbs; +} VkGeometryDataNV; + +typedef struct VkGeometryNV { + VkStructureType sType; + const void* pNext; + VkGeometryTypeNV geometryType; + VkGeometryDataNV geometry; + VkGeometryFlagsNV flags; +} VkGeometryNV; + +typedef struct VkAccelerationStructureInfoNV { + VkStructureType sType; + const void* pNext; + VkAccelerationStructureTypeNV type; + VkBuildAccelerationStructureFlagsNV flags; + uint32_t instanceCount; + uint32_t geometryCount; + const VkGeometryNV* pGeometries; +} VkAccelerationStructureInfoNV; + +typedef struct VkAccelerationStructureCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkDeviceSize compactedSize; + VkAccelerationStructureInfoNV info; +} VkAccelerationStructureCreateInfoNV; + +typedef struct VkBindAccelerationStructureMemoryInfoNV { + VkStructureType sType; + const void* pNext; + VkAccelerationStructureNV accelerationStructure; + VkDeviceMemory memory; + 
VkDeviceSize memoryOffset; + uint32_t deviceIndexCount; + const uint32_t* pDeviceIndices; +} VkBindAccelerationStructureMemoryInfoNV; + +typedef struct VkWriteDescriptorSetAccelerationStructureNV { + VkStructureType sType; + const void* pNext; + uint32_t accelerationStructureCount; + const VkAccelerationStructureNV* pAccelerationStructures; +} VkWriteDescriptorSetAccelerationStructureNV; + +typedef struct VkAccelerationStructureMemoryRequirementsInfoNV { + VkStructureType sType; + const void* pNext; + VkAccelerationStructureMemoryRequirementsTypeNV type; + VkAccelerationStructureNV accelerationStructure; +} VkAccelerationStructureMemoryRequirementsInfoNV; + +typedef struct VkPhysicalDeviceRayTracingPropertiesNV { + VkStructureType sType; + void* pNext; + uint32_t shaderGroupHandleSize; + uint32_t maxRecursionDepth; + uint32_t maxShaderGroupStride; + uint32_t shaderGroupBaseAlignment; + uint64_t maxGeometryCount; + uint64_t maxInstanceCount; + uint64_t maxTriangleCount; + uint32_t maxDescriptorSetAccelerationStructures; +} VkPhysicalDeviceRayTracingPropertiesNV; + + +typedef VkResult (VKAPI_PTR *PFN_vkCreateAccelerationStructureNV)(VkDevice device, const VkAccelerationStructureCreateInfoNV* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkAccelerationStructureNV* pAccelerationStructure); +typedef void (VKAPI_PTR *PFN_vkDestroyAccelerationStructureNV)(VkDevice device, VkAccelerationStructureNV accelerationStructure, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkGetAccelerationStructureMemoryRequirementsNV)(VkDevice device, const VkAccelerationStructureMemoryRequirementsInfoNV* pInfo, VkMemoryRequirements2KHR* pMemoryRequirements); +typedef VkResult (VKAPI_PTR *PFN_vkBindAccelerationStructureMemoryNV)(VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoNV* pBindInfos); +typedef void (VKAPI_PTR *PFN_vkCmdBuildAccelerationStructureNV)(VkCommandBuffer commandBuffer, const VkAccelerationStructureInfoNV* pInfo, VkBuffer instanceData, VkDeviceSize instanceOffset, VkBool32 update, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkBuffer scratch, VkDeviceSize scratchOffset); +typedef void (VKAPI_PTR *PFN_vkCmdCopyAccelerationStructureNV)(VkCommandBuffer commandBuffer, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkCopyAccelerationStructureModeNV mode); +typedef void (VKAPI_PTR *PFN_vkCmdTraceRaysNV)(VkCommandBuffer commandBuffer, VkBuffer raygenShaderBindingTableBuffer, VkDeviceSize raygenShaderBindingOffset, VkBuffer missShaderBindingTableBuffer, VkDeviceSize missShaderBindingOffset, VkDeviceSize missShaderBindingStride, VkBuffer hitShaderBindingTableBuffer, VkDeviceSize hitShaderBindingOffset, VkDeviceSize hitShaderBindingStride, VkBuffer callableShaderBindingTableBuffer, VkDeviceSize callableShaderBindingOffset, VkDeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth); +typedef VkResult (VKAPI_PTR *PFN_vkCreateRayTracingPipelinesNV)(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkRayTracingPipelineCreateInfoNV* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines); +typedef VkResult (VKAPI_PTR *PFN_vkGetRayTracingShaderGroupHandlesNV)(VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData); +typedef VkResult (VKAPI_PTR *PFN_vkGetAccelerationStructureHandleNV)(VkDevice device, VkAccelerationStructureNV accelerationStructure, size_t dataSize, void* pData); 
+typedef void (VKAPI_PTR *PFN_vkCmdWriteAccelerationStructuresPropertiesNV)(VkCommandBuffer commandBuffer, uint32_t accelerationStructureCount, const VkAccelerationStructureNV* pAccelerationStructures, VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery); +typedef VkResult (VKAPI_PTR *PFN_vkCompileDeferredNV)(VkDevice device, VkPipeline pipeline, uint32_t shader); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateAccelerationStructureNV( + VkDevice device, + const VkAccelerationStructureCreateInfoNV* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkAccelerationStructureNV* pAccelerationStructure); + +VKAPI_ATTR void VKAPI_CALL vkDestroyAccelerationStructureNV( + VkDevice device, + VkAccelerationStructureNV accelerationStructure, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR void VKAPI_CALL vkGetAccelerationStructureMemoryRequirementsNV( + VkDevice device, + const VkAccelerationStructureMemoryRequirementsInfoNV* pInfo, + VkMemoryRequirements2KHR* pMemoryRequirements); + +VKAPI_ATTR VkResult VKAPI_CALL vkBindAccelerationStructureMemoryNV( + VkDevice device, + uint32_t bindInfoCount, + const VkBindAccelerationStructureMemoryInfoNV* pBindInfos); + +VKAPI_ATTR void VKAPI_CALL vkCmdBuildAccelerationStructureNV( + VkCommandBuffer commandBuffer, + const VkAccelerationStructureInfoNV* pInfo, + VkBuffer instanceData, + VkDeviceSize instanceOffset, + VkBool32 update, + VkAccelerationStructureNV dst, + VkAccelerationStructureNV src, + VkBuffer scratch, + VkDeviceSize scratchOffset); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyAccelerationStructureNV( + VkCommandBuffer commandBuffer, + VkAccelerationStructureNV dst, + VkAccelerationStructureNV src, + VkCopyAccelerationStructureModeNV mode); + +VKAPI_ATTR void VKAPI_CALL vkCmdTraceRaysNV( + VkCommandBuffer commandBuffer, + VkBuffer raygenShaderBindingTableBuffer, + VkDeviceSize raygenShaderBindingOffset, + VkBuffer missShaderBindingTableBuffer, + VkDeviceSize missShaderBindingOffset, + VkDeviceSize missShaderBindingStride, + VkBuffer hitShaderBindingTableBuffer, + VkDeviceSize hitShaderBindingOffset, + VkDeviceSize hitShaderBindingStride, + VkBuffer callableShaderBindingTableBuffer, + VkDeviceSize callableShaderBindingOffset, + VkDeviceSize callableShaderBindingStride, + uint32_t width, + uint32_t height, + uint32_t depth); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateRayTracingPipelinesNV( + VkDevice device, + VkPipelineCache pipelineCache, + uint32_t createInfoCount, + const VkRayTracingPipelineCreateInfoNV* pCreateInfos, + const VkAllocationCallbacks* pAllocator, + VkPipeline* pPipelines); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetRayTracingShaderGroupHandlesNV( + VkDevice device, + VkPipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + size_t dataSize, + void* pData); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetAccelerationStructureHandleNV( + VkDevice device, + VkAccelerationStructureNV accelerationStructure, + size_t dataSize, + void* pData); + +VKAPI_ATTR void VKAPI_CALL vkCmdWriteAccelerationStructuresPropertiesNV( + VkCommandBuffer commandBuffer, + uint32_t accelerationStructureCount, + const VkAccelerationStructureNV* pAccelerationStructures, + VkQueryType queryType, + VkQueryPool queryPool, + uint32_t firstQuery); + +VKAPI_ATTR VkResult VKAPI_CALL vkCompileDeferredNV( + VkDevice device, + VkPipeline pipeline, + uint32_t shader); +#endif + +#define VK_NV_representative_fragment_test 1 +#define VK_NV_REPRESENTATIVE_FRAGMENT_TEST_SPEC_VERSION 1 +#define 
VK_NV_REPRESENTATIVE_FRAGMENT_TEST_EXTENSION_NAME "VK_NV_representative_fragment_test" + +typedef struct VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 representativeFragmentTest; +} VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV; + +typedef struct VkPipelineRepresentativeFragmentTestStateCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkBool32 representativeFragmentTestEnable; +} VkPipelineRepresentativeFragmentTestStateCreateInfoNV; + + + +#define VK_EXT_global_priority 1 +#define VK_EXT_GLOBAL_PRIORITY_SPEC_VERSION 2 +#define VK_EXT_GLOBAL_PRIORITY_EXTENSION_NAME "VK_EXT_global_priority" + + +typedef enum VkQueueGlobalPriorityEXT { + VK_QUEUE_GLOBAL_PRIORITY_LOW_EXT = 128, + VK_QUEUE_GLOBAL_PRIORITY_MEDIUM_EXT = 256, + VK_QUEUE_GLOBAL_PRIORITY_HIGH_EXT = 512, + VK_QUEUE_GLOBAL_PRIORITY_REALTIME_EXT = 1024, + VK_QUEUE_GLOBAL_PRIORITY_BEGIN_RANGE_EXT = VK_QUEUE_GLOBAL_PRIORITY_LOW_EXT, + VK_QUEUE_GLOBAL_PRIORITY_END_RANGE_EXT = VK_QUEUE_GLOBAL_PRIORITY_REALTIME_EXT, + VK_QUEUE_GLOBAL_PRIORITY_RANGE_SIZE_EXT = (VK_QUEUE_GLOBAL_PRIORITY_REALTIME_EXT - VK_QUEUE_GLOBAL_PRIORITY_LOW_EXT + 1), + VK_QUEUE_GLOBAL_PRIORITY_MAX_ENUM_EXT = 0x7FFFFFFF +} VkQueueGlobalPriorityEXT; + +typedef struct VkDeviceQueueGlobalPriorityCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkQueueGlobalPriorityEXT globalPriority; +} VkDeviceQueueGlobalPriorityCreateInfoEXT; + + + +#define VK_EXT_external_memory_host 1 +#define VK_EXT_EXTERNAL_MEMORY_HOST_SPEC_VERSION 1 +#define VK_EXT_EXTERNAL_MEMORY_HOST_EXTENSION_NAME "VK_EXT_external_memory_host" + +typedef struct VkImportMemoryHostPointerInfoEXT { + VkStructureType sType; + const void* pNext; + VkExternalMemoryHandleTypeFlagBits handleType; + void* pHostPointer; +} VkImportMemoryHostPointerInfoEXT; + +typedef struct VkMemoryHostPointerPropertiesEXT { + VkStructureType sType; + void* pNext; + uint32_t memoryTypeBits; +} VkMemoryHostPointerPropertiesEXT; + +typedef struct VkPhysicalDeviceExternalMemoryHostPropertiesEXT { + VkStructureType sType; + void* pNext; + VkDeviceSize minImportedHostPointerAlignment; +} VkPhysicalDeviceExternalMemoryHostPropertiesEXT; + + +typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryHostPointerPropertiesEXT)(VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, VkMemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryHostPointerPropertiesEXT( + VkDevice device, + VkExternalMemoryHandleTypeFlagBits handleType, + const void* pHostPointer, + VkMemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties); +#endif + +#define VK_AMD_buffer_marker 1 +#define VK_AMD_BUFFER_MARKER_SPEC_VERSION 1 +#define VK_AMD_BUFFER_MARKER_EXTENSION_NAME "VK_AMD_buffer_marker" + +typedef void (VKAPI_PTR *PFN_vkCmdWriteBufferMarkerAMD)(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdWriteBufferMarkerAMD( + VkCommandBuffer commandBuffer, + VkPipelineStageFlagBits pipelineStage, + VkBuffer dstBuffer, + VkDeviceSize dstOffset, + uint32_t marker); +#endif + +#define VK_EXT_calibrated_timestamps 1 +#define VK_EXT_CALIBRATED_TIMESTAMPS_SPEC_VERSION 1 +#define VK_EXT_CALIBRATED_TIMESTAMPS_EXTENSION_NAME "VK_EXT_calibrated_timestamps" + + +typedef enum VkTimeDomainEXT { + VK_TIME_DOMAIN_DEVICE_EXT = 0, + 
VK_TIME_DOMAIN_CLOCK_MONOTONIC_EXT = 1, + VK_TIME_DOMAIN_CLOCK_MONOTONIC_RAW_EXT = 2, + VK_TIME_DOMAIN_QUERY_PERFORMANCE_COUNTER_EXT = 3, + VK_TIME_DOMAIN_BEGIN_RANGE_EXT = VK_TIME_DOMAIN_DEVICE_EXT, + VK_TIME_DOMAIN_END_RANGE_EXT = VK_TIME_DOMAIN_QUERY_PERFORMANCE_COUNTER_EXT, + VK_TIME_DOMAIN_RANGE_SIZE_EXT = (VK_TIME_DOMAIN_QUERY_PERFORMANCE_COUNTER_EXT - VK_TIME_DOMAIN_DEVICE_EXT + 1), + VK_TIME_DOMAIN_MAX_ENUM_EXT = 0x7FFFFFFF +} VkTimeDomainEXT; + +typedef struct VkCalibratedTimestampInfoEXT { + VkStructureType sType; + const void* pNext; + VkTimeDomainEXT timeDomain; +} VkCalibratedTimestampInfoEXT; + + +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT)(VkPhysicalDevice physicalDevice, uint32_t* pTimeDomainCount, VkTimeDomainEXT* pTimeDomains); +typedef VkResult (VKAPI_PTR *PFN_vkGetCalibratedTimestampsEXT)(VkDevice device, uint32_t timestampCount, const VkCalibratedTimestampInfoEXT* pTimestampInfos, uint64_t* pTimestamps, uint64_t* pMaxDeviation); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( + VkPhysicalDevice physicalDevice, + uint32_t* pTimeDomainCount, + VkTimeDomainEXT* pTimeDomains); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetCalibratedTimestampsEXT( + VkDevice device, + uint32_t timestampCount, + const VkCalibratedTimestampInfoEXT* pTimestampInfos, + uint64_t* pTimestamps, + uint64_t* pMaxDeviation); +#endif + +#define VK_AMD_shader_core_properties 1 +#define VK_AMD_SHADER_CORE_PROPERTIES_SPEC_VERSION 1 +#define VK_AMD_SHADER_CORE_PROPERTIES_EXTENSION_NAME "VK_AMD_shader_core_properties" + +typedef struct VkPhysicalDeviceShaderCorePropertiesAMD { + VkStructureType sType; + void* pNext; + uint32_t shaderEngineCount; + uint32_t shaderArraysPerEngineCount; + uint32_t computeUnitsPerShaderArray; + uint32_t simdPerComputeUnit; + uint32_t wavefrontsPerSimd; + uint32_t wavefrontSize; + uint32_t sgprsPerSimd; + uint32_t minSgprAllocation; + uint32_t maxSgprAllocation; + uint32_t sgprAllocationGranularity; + uint32_t vgprsPerSimd; + uint32_t minVgprAllocation; + uint32_t maxVgprAllocation; + uint32_t vgprAllocationGranularity; +} VkPhysicalDeviceShaderCorePropertiesAMD; + + + +#define VK_AMD_memory_overallocation_behavior 1 +#define VK_AMD_MEMORY_OVERALLOCATION_BEHAVIOR_SPEC_VERSION 1 +#define VK_AMD_MEMORY_OVERALLOCATION_BEHAVIOR_EXTENSION_NAME "VK_AMD_memory_overallocation_behavior" + + +typedef enum VkMemoryOverallocationBehaviorAMD { + VK_MEMORY_OVERALLOCATION_BEHAVIOR_DEFAULT_AMD = 0, + VK_MEMORY_OVERALLOCATION_BEHAVIOR_ALLOWED_AMD = 1, + VK_MEMORY_OVERALLOCATION_BEHAVIOR_DISALLOWED_AMD = 2, + VK_MEMORY_OVERALLOCATION_BEHAVIOR_BEGIN_RANGE_AMD = VK_MEMORY_OVERALLOCATION_BEHAVIOR_DEFAULT_AMD, + VK_MEMORY_OVERALLOCATION_BEHAVIOR_END_RANGE_AMD = VK_MEMORY_OVERALLOCATION_BEHAVIOR_DISALLOWED_AMD, + VK_MEMORY_OVERALLOCATION_BEHAVIOR_RANGE_SIZE_AMD = (VK_MEMORY_OVERALLOCATION_BEHAVIOR_DISALLOWED_AMD - VK_MEMORY_OVERALLOCATION_BEHAVIOR_DEFAULT_AMD + 1), + VK_MEMORY_OVERALLOCATION_BEHAVIOR_MAX_ENUM_AMD = 0x7FFFFFFF +} VkMemoryOverallocationBehaviorAMD; + +typedef struct VkDeviceMemoryOverallocationCreateInfoAMD { + VkStructureType sType; + const void* pNext; + VkMemoryOverallocationBehaviorAMD overallocationBehavior; +} VkDeviceMemoryOverallocationCreateInfoAMD; + + + +#define VK_EXT_vertex_attribute_divisor 1 +#define VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_SPEC_VERSION 3 +#define VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_EXTENSION_NAME "VK_EXT_vertex_attribute_divisor" + +typedef struct 
VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT { + VkStructureType sType; + void* pNext; + uint32_t maxVertexAttribDivisor; +} VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT; + +typedef struct VkVertexInputBindingDivisorDescriptionEXT { + uint32_t binding; + uint32_t divisor; +} VkVertexInputBindingDivisorDescriptionEXT; + +typedef struct VkPipelineVertexInputDivisorStateCreateInfoEXT { + VkStructureType sType; + const void* pNext; + uint32_t vertexBindingDivisorCount; + const VkVertexInputBindingDivisorDescriptionEXT* pVertexBindingDivisors; +} VkPipelineVertexInputDivisorStateCreateInfoEXT; + +typedef struct VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 vertexAttributeInstanceRateDivisor; + VkBool32 vertexAttributeInstanceRateZeroDivisor; +} VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT; + + + +#define VK_NV_shader_subgroup_partitioned 1 +#define VK_NV_SHADER_SUBGROUP_PARTITIONED_SPEC_VERSION 1 +#define VK_NV_SHADER_SUBGROUP_PARTITIONED_EXTENSION_NAME "VK_NV_shader_subgroup_partitioned" + + +#define VK_NV_compute_shader_derivatives 1 +#define VK_NV_COMPUTE_SHADER_DERIVATIVES_SPEC_VERSION 1 +#define VK_NV_COMPUTE_SHADER_DERIVATIVES_EXTENSION_NAME "VK_NV_compute_shader_derivatives" + +typedef struct VkPhysicalDeviceComputeShaderDerivativesFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 computeDerivativeGroupQuads; + VkBool32 computeDerivativeGroupLinear; +} VkPhysicalDeviceComputeShaderDerivativesFeaturesNV; + + + +#define VK_NV_mesh_shader 1 +#define VK_NV_MESH_SHADER_SPEC_VERSION 1 +#define VK_NV_MESH_SHADER_EXTENSION_NAME "VK_NV_mesh_shader" + +typedef struct VkPhysicalDeviceMeshShaderFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 taskShader; + VkBool32 meshShader; +} VkPhysicalDeviceMeshShaderFeaturesNV; + +typedef struct VkPhysicalDeviceMeshShaderPropertiesNV { + VkStructureType sType; + void* pNext; + uint32_t maxDrawMeshTasksCount; + uint32_t maxTaskWorkGroupInvocations; + uint32_t maxTaskWorkGroupSize[3]; + uint32_t maxTaskTotalMemorySize; + uint32_t maxTaskOutputCount; + uint32_t maxMeshWorkGroupInvocations; + uint32_t maxMeshWorkGroupSize[3]; + uint32_t maxMeshTotalMemorySize; + uint32_t maxMeshOutputVertices; + uint32_t maxMeshOutputPrimitives; + uint32_t maxMeshMultiviewViewCount; + uint32_t meshOutputPerVertexGranularity; + uint32_t meshOutputPerPrimitiveGranularity; +} VkPhysicalDeviceMeshShaderPropertiesNV; + +typedef struct VkDrawMeshTasksIndirectCommandNV { + uint32_t taskCount; + uint32_t firstTask; +} VkDrawMeshTasksIndirectCommandNV; + + +typedef void (VKAPI_PTR *PFN_vkCmdDrawMeshTasksNV)(VkCommandBuffer commandBuffer, uint32_t taskCount, uint32_t firstTask); +typedef void (VKAPI_PTR *PFN_vkCmdDrawMeshTasksIndirectNV)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride); +typedef void (VKAPI_PTR *PFN_vkCmdDrawMeshTasksIndirectCountNV)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdDrawMeshTasksNV( + VkCommandBuffer commandBuffer, + uint32_t taskCount, + uint32_t firstTask); + +VKAPI_ATTR void VKAPI_CALL vkCmdDrawMeshTasksIndirectNV( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + uint32_t drawCount, + uint32_t stride); + +VKAPI_ATTR void VKAPI_CALL vkCmdDrawMeshTasksIndirectCountNV( + VkCommandBuffer 
commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride); +#endif + +#define VK_NV_fragment_shader_barycentric 1 +#define VK_NV_FRAGMENT_SHADER_BARYCENTRIC_SPEC_VERSION 1 +#define VK_NV_FRAGMENT_SHADER_BARYCENTRIC_EXTENSION_NAME "VK_NV_fragment_shader_barycentric" + +typedef struct VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 fragmentShaderBarycentric; +} VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV; + + + +#define VK_NV_shader_image_footprint 1 +#define VK_NV_SHADER_IMAGE_FOOTPRINT_SPEC_VERSION 1 +#define VK_NV_SHADER_IMAGE_FOOTPRINT_EXTENSION_NAME "VK_NV_shader_image_footprint" + +typedef struct VkPhysicalDeviceShaderImageFootprintFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 imageFootprint; +} VkPhysicalDeviceShaderImageFootprintFeaturesNV; + + + +#define VK_NV_scissor_exclusive 1 +#define VK_NV_SCISSOR_EXCLUSIVE_SPEC_VERSION 1 +#define VK_NV_SCISSOR_EXCLUSIVE_EXTENSION_NAME "VK_NV_scissor_exclusive" + +typedef struct VkPipelineViewportExclusiveScissorStateCreateInfoNV { + VkStructureType sType; + const void* pNext; + uint32_t exclusiveScissorCount; + const VkRect2D* pExclusiveScissors; +} VkPipelineViewportExclusiveScissorStateCreateInfoNV; + +typedef struct VkPhysicalDeviceExclusiveScissorFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 exclusiveScissor; +} VkPhysicalDeviceExclusiveScissorFeaturesNV; + + +typedef void (VKAPI_PTR *PFN_vkCmdSetExclusiveScissorNV)(VkCommandBuffer commandBuffer, uint32_t firstExclusiveScissor, uint32_t exclusiveScissorCount, const VkRect2D* pExclusiveScissors); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetExclusiveScissorNV( + VkCommandBuffer commandBuffer, + uint32_t firstExclusiveScissor, + uint32_t exclusiveScissorCount, + const VkRect2D* pExclusiveScissors); +#endif + +#define VK_NV_device_diagnostic_checkpoints 1 +#define VK_NV_DEVICE_DIAGNOSTIC_CHECKPOINTS_SPEC_VERSION 2 +#define VK_NV_DEVICE_DIAGNOSTIC_CHECKPOINTS_EXTENSION_NAME "VK_NV_device_diagnostic_checkpoints" + +typedef struct VkQueueFamilyCheckpointPropertiesNV { + VkStructureType sType; + void* pNext; + VkPipelineStageFlags checkpointExecutionStageMask; +} VkQueueFamilyCheckpointPropertiesNV; + +typedef struct VkCheckpointDataNV { + VkStructureType sType; + void* pNext; + VkPipelineStageFlagBits stage; + void* pCheckpointMarker; +} VkCheckpointDataNV; + + +typedef void (VKAPI_PTR *PFN_vkCmdSetCheckpointNV)(VkCommandBuffer commandBuffer, const void* pCheckpointMarker); +typedef void (VKAPI_PTR *PFN_vkGetQueueCheckpointDataNV)(VkQueue queue, uint32_t* pCheckpointDataCount, VkCheckpointDataNV* pCheckpointData); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetCheckpointNV( + VkCommandBuffer commandBuffer, + const void* pCheckpointMarker); + +VKAPI_ATTR void VKAPI_CALL vkGetQueueCheckpointDataNV( + VkQueue queue, + uint32_t* pCheckpointDataCount, + VkCheckpointDataNV* pCheckpointData); +#endif + +#define VK_EXT_pci_bus_info 1 +#define VK_EXT_PCI_BUS_INFO_SPEC_VERSION 1 +#define VK_EXT_PCI_BUS_INFO_EXTENSION_NAME "VK_EXT_pci_bus_info" + +typedef struct VkPhysicalDevicePCIBusInfoPropertiesEXT { + VkStructureType sType; + void* pNext; + uint16_t pciDomain; + uint8_t pciBus; + uint8_t pciDevice; + uint8_t pciFunction; +} VkPhysicalDevicePCIBusInfoPropertiesEXT; + + + +#define VK_GOOGLE_hlsl_functionality1 1 +#define VK_GOOGLE_HLSL_FUNCTIONALITY1_SPEC_VERSION 0 
+#define VK_GOOGLE_HLSL_FUNCTIONALITY1_EXTENSION_NAME "VK_GOOGLE_hlsl_functionality1" + + +#define VK_GOOGLE_decorate_string 1 +#define VK_GOOGLE_DECORATE_STRING_SPEC_VERSION 0 +#define VK_GOOGLE_DECORATE_STRING_EXTENSION_NAME "VK_GOOGLE_decorate_string" + + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/src/video/khronos/vulkan/vulkan_fuchsia.h b/src/video/khronos/vulkan/vulkan_fuchsia.h new file mode 100644 index 000000000..e0ed5455a --- /dev/null +++ b/src/video/khronos/vulkan/vulkan_fuchsia.h @@ -0,0 +1,58 @@ +#ifndef VULKAN_FUCHSIA_H_ +#define VULKAN_FUCHSIA_H_ 1 + +#ifdef __cplusplus +extern "C" { +#endif + +/* +** Copyright (c) 2015-2018 The Khronos Group Inc. +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#define VK_FUCHSIA_imagepipe_surface 1 +#define VK_FUCHSIA_IMAGEPIPE_SURFACE_SPEC_VERSION 1 +#define VK_FUCHSIA_IMAGEPIPE_SURFACE_EXTENSION_NAME "VK_FUCHSIA_imagepipe_surface" + +typedef VkFlags VkImagePipeSurfaceCreateFlagsFUCHSIA; + +typedef struct VkImagePipeSurfaceCreateInfoFUCHSIA { + VkStructureType sType; + const void* pNext; + VkImagePipeSurfaceCreateFlagsFUCHSIA flags; + zx_handle_t imagePipeHandle; +} VkImagePipeSurfaceCreateInfoFUCHSIA; + + +typedef VkResult (VKAPI_PTR *PFN_vkCreateImagePipeSurfaceFUCHSIA)(VkInstance instance, const VkImagePipeSurfaceCreateInfoFUCHSIA* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateImagePipeSurfaceFUCHSIA( + VkInstance instance, + const VkImagePipeSurfaceCreateInfoFUCHSIA* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); +#endif + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/src/video/khronos/vulkan/vulkan_ios.h b/src/video/khronos/vulkan/vulkan_ios.h new file mode 100644 index 000000000..a0924816d --- /dev/null +++ b/src/video/khronos/vulkan/vulkan_ios.h @@ -0,0 +1,58 @@ +#ifndef VULKAN_IOS_H_ +#define VULKAN_IOS_H_ 1 + +#ifdef __cplusplus +extern "C" { +#endif + +/* +** Copyright (c) 2015-2018 The Khronos Group Inc. +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. 
+** +*/ + + +#define VK_MVK_ios_surface 1 +#define VK_MVK_IOS_SURFACE_SPEC_VERSION 2 +#define VK_MVK_IOS_SURFACE_EXTENSION_NAME "VK_MVK_ios_surface" + +typedef VkFlags VkIOSSurfaceCreateFlagsMVK; + +typedef struct VkIOSSurfaceCreateInfoMVK { + VkStructureType sType; + const void* pNext; + VkIOSSurfaceCreateFlagsMVK flags; + const void* pView; +} VkIOSSurfaceCreateInfoMVK; + + +typedef VkResult (VKAPI_PTR *PFN_vkCreateIOSSurfaceMVK)(VkInstance instance, const VkIOSSurfaceCreateInfoMVK* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateIOSSurfaceMVK( + VkInstance instance, + const VkIOSSurfaceCreateInfoMVK* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); +#endif + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/src/video/khronos/vulkan/vulkan_macos.h b/src/video/khronos/vulkan/vulkan_macos.h new file mode 100644 index 000000000..ff0b70180 --- /dev/null +++ b/src/video/khronos/vulkan/vulkan_macos.h @@ -0,0 +1,58 @@ +#ifndef VULKAN_MACOS_H_ +#define VULKAN_MACOS_H_ 1 + +#ifdef __cplusplus +extern "C" { +#endif + +/* +** Copyright (c) 2015-2018 The Khronos Group Inc. +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#define VK_MVK_macos_surface 1 +#define VK_MVK_MACOS_SURFACE_SPEC_VERSION 2 +#define VK_MVK_MACOS_SURFACE_EXTENSION_NAME "VK_MVK_macos_surface" + +typedef VkFlags VkMacOSSurfaceCreateFlagsMVK; + +typedef struct VkMacOSSurfaceCreateInfoMVK { + VkStructureType sType; + const void* pNext; + VkMacOSSurfaceCreateFlagsMVK flags; + const void* pView; +} VkMacOSSurfaceCreateInfoMVK; + + +typedef VkResult (VKAPI_PTR *PFN_vkCreateMacOSSurfaceMVK)(VkInstance instance, const VkMacOSSurfaceCreateInfoMVK* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateMacOSSurfaceMVK( + VkInstance instance, + const VkMacOSSurfaceCreateInfoMVK* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); +#endif + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/src/video/khronos/vulkan/vulkan_mir.h b/src/video/khronos/vulkan/vulkan_mir.h new file mode 100644 index 000000000..7d24ed27a --- /dev/null +++ b/src/video/khronos/vulkan/vulkan_mir.h @@ -0,0 +1,65 @@ +#ifndef VULKAN_MIR_H_ +#define VULKAN_MIR_H_ 1 + +#ifdef __cplusplus +extern "C" { +#endif + +/* +** Copyright (c) 2015-2018 The Khronos Group Inc. +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
diff --git a/src/video/khronos/vulkan/vulkan_mir.h b/src/video/khronos/vulkan/vulkan_mir.h
new file mode 100644
index 000000000..7d24ed27a
--- /dev/null
+++ b/src/video/khronos/vulkan/vulkan_mir.h
@@ -0,0 +1,65 @@
+#ifndef VULKAN_MIR_H_
+#define VULKAN_MIR_H_ 1
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/*
+** Copyright (c) 2015-2018 The Khronos Group Inc.
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+** http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+/*
+** This header is generated from the Khronos Vulkan XML API Registry.
+**
+*/
+
+
+#define VK_KHR_mir_surface 1
+#define VK_KHR_MIR_SURFACE_SPEC_VERSION 4
+#define VK_KHR_MIR_SURFACE_EXTENSION_NAME "VK_KHR_mir_surface"
+
+typedef VkFlags VkMirSurfaceCreateFlagsKHR;
+
+typedef struct VkMirSurfaceCreateInfoKHR {
+    VkStructureType sType;
+    const void* pNext;
+    VkMirSurfaceCreateFlagsKHR flags;
+    MirConnection* connection;
+    MirSurface* mirSurface;
+} VkMirSurfaceCreateInfoKHR;
+
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreateMirSurfaceKHR)(VkInstance instance, const VkMirSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface);
+typedef VkBool32 (VKAPI_PTR *PFN_vkGetPhysicalDeviceMirPresentationSupportKHR)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, MirConnection* connection);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateMirSurfaceKHR(
+    VkInstance instance,
+    const VkMirSurfaceCreateInfoKHR* pCreateInfo,
+    const VkAllocationCallbacks* pAllocator,
+    VkSurfaceKHR* pSurface);
+
+VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceMirPresentationSupportKHR(
+    VkPhysicalDevice physicalDevice,
+    uint32_t queueFamilyIndex,
+    MirConnection* connection);
+#endif
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
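A comparable sketch for the Mir path, assuming VK_USE_PLATFORM_MIR_KHR is defined and the VK_KHR_mir_surface instance extension was enabled; the helper name is hypothetical, and this extension only applies to headers of this vintage (it was later dropped from the registry).

/* Hypothetical helper: checks presentation support, then creates a Mir surface. */
static VkSurfaceKHR create_mir_surface(VkInstance instance, VkPhysicalDevice gpu,
                                        uint32_t queue_family,
                                        MirConnection *connection, MirSurface *window)
{
    VkSurfaceKHR surface = VK_NULL_HANDLE;
    VkMirSurfaceCreateInfoKHR info = { VK_STRUCTURE_TYPE_MIR_SURFACE_CREATE_INFO_KHR };

    if (!vkGetPhysicalDeviceMirPresentationSupportKHR(gpu, queue_family, connection)) {
        return VK_NULL_HANDLE;  /* this queue family cannot present to this connection */
    }
    info.connection = connection;
    info.mirSurface = window;
    if (vkCreateMirSurfaceKHR(instance, &info, NULL, &surface) != VK_SUCCESS) {
        return VK_NULL_HANDLE;
    }
    return surface;
}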
diff --git a/src/video/khronos/vulkan/vulkan_vi.h b/src/video/khronos/vulkan/vulkan_vi.h
new file mode 100644
index 000000000..015166bfc
--- /dev/null
+++ b/src/video/khronos/vulkan/vulkan_vi.h
@@ -0,0 +1,58 @@
+#ifndef VULKAN_VI_H_
+#define VULKAN_VI_H_ 1
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/*
+** Copyright (c) 2015-2018 The Khronos Group Inc.
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+** http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+/*
+** This header is generated from the Khronos Vulkan XML API Registry.
+**
+*/
+
+
+#define VK_NN_vi_surface 1
+#define VK_NN_VI_SURFACE_SPEC_VERSION 1
+#define VK_NN_VI_SURFACE_EXTENSION_NAME "VK_NN_vi_surface"
+
+typedef VkFlags VkViSurfaceCreateFlagsNN;
+
+typedef struct VkViSurfaceCreateInfoNN {
+    VkStructureType sType;
+    const void* pNext;
+    VkViSurfaceCreateFlagsNN flags;
+    void* window;
+} VkViSurfaceCreateInfoNN;
+
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreateViSurfaceNN)(VkInstance instance, const VkViSurfaceCreateInfoNN* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateViSurfaceNN(
+    VkInstance instance,
+    const VkViSurfaceCreateInfoNN* pCreateInfo,
+    const VkAllocationCallbacks* pAllocator,
+    VkSurfaceKHR* pSurface);
+#endif
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
diff --git a/src/video/khronos/vulkan/vulkan_wayland.h b/src/video/khronos/vulkan/vulkan_wayland.h
new file mode 100644
index 000000000..5ba0827aa
--- /dev/null
+++ b/src/video/khronos/vulkan/vulkan_wayland.h
@@ -0,0 +1,65 @@
+#ifndef VULKAN_WAYLAND_H_
+#define VULKAN_WAYLAND_H_ 1
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/*
+** Copyright (c) 2015-2018 The Khronos Group Inc.
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+** http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+/*
+** This header is generated from the Khronos Vulkan XML API Registry.
+**
+*/
+
+
+#define VK_KHR_wayland_surface 1
+#define VK_KHR_WAYLAND_SURFACE_SPEC_VERSION 6
+#define VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME "VK_KHR_wayland_surface"
+
+typedef VkFlags VkWaylandSurfaceCreateFlagsKHR;
+
+typedef struct VkWaylandSurfaceCreateInfoKHR {
+    VkStructureType sType;
+    const void* pNext;
+    VkWaylandSurfaceCreateFlagsKHR flags;
+    struct wl_display* display;
+    struct wl_surface* surface;
+} VkWaylandSurfaceCreateInfoKHR;
+
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreateWaylandSurfaceKHR)(VkInstance instance, const VkWaylandSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface);
+typedef VkBool32 (VKAPI_PTR *PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, struct wl_display* display);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateWaylandSurfaceKHR(
+    VkInstance instance,
+    const VkWaylandSurfaceCreateInfoKHR* pCreateInfo,
+    const VkAllocationCallbacks* pAllocator,
+    VkSurfaceKHR* pSurface);
+
+VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceWaylandPresentationSupportKHR(
+    VkPhysicalDevice physicalDevice,
+    uint32_t queueFamilyIndex,
+    struct wl_display* display);
+#endif
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
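A minimal sketch of the Wayland path declared above: query whether a queue family can present to the wl_display, then create the surface. The helper name is hypothetical and assumes VK_USE_PLATFORM_WAYLAND_KHR is defined and the VK_KHR_wayland_surface instance extension was enabled.

/* Hypothetical helper: checks presentation support, then wraps a wl_surface in a VkSurfaceKHR. */
static VkSurfaceKHR create_wayland_surface(VkInstance instance, VkPhysicalDevice gpu,
                                           uint32_t queue_family,
                                           struct wl_display *display, struct wl_surface *wl_surf)
{
    VkSurfaceKHR surface = VK_NULL_HANDLE;
    VkWaylandSurfaceCreateInfoKHR info = { VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR };

    if (!vkGetPhysicalDeviceWaylandPresentationSupportKHR(gpu, queue_family, display)) {
        return VK_NULL_HANDLE;  /* this queue family cannot present to this display */
    }
    info.display = display;
    info.surface = wl_surf;
    if (vkCreateWaylandSurfaceKHR(instance, &info, NULL, &surface) != VK_SUCCESS) {
        return VK_NULL_HANDLE;
    }
    return surface;
}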
diff --git a/src/video/khronos/vulkan/vulkan_win32.h b/src/video/khronos/vulkan/vulkan_win32.h
new file mode 100644
index 000000000..6a85409eb
--- /dev/null
+++ b/src/video/khronos/vulkan/vulkan_win32.h
@@ -0,0 +1,276 @@
+#ifndef VULKAN_WIN32_H_
+#define VULKAN_WIN32_H_ 1
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/*
+** Copyright (c) 2015-2018 The Khronos Group Inc.
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+** http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+/*
+** This header is generated from the Khronos Vulkan XML API Registry.
+**
+*/
+
+
+#define VK_KHR_win32_surface 1
+#define VK_KHR_WIN32_SURFACE_SPEC_VERSION 6
+#define VK_KHR_WIN32_SURFACE_EXTENSION_NAME "VK_KHR_win32_surface"
+
+typedef VkFlags VkWin32SurfaceCreateFlagsKHR;
+
+typedef struct VkWin32SurfaceCreateInfoKHR {
+    VkStructureType sType;
+    const void* pNext;
+    VkWin32SurfaceCreateFlagsKHR flags;
+    HINSTANCE hinstance;
+    HWND hwnd;
+} VkWin32SurfaceCreateInfoKHR;
+
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreateWin32SurfaceKHR)(VkInstance instance, const VkWin32SurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface);
+typedef VkBool32 (VKAPI_PTR *PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateWin32SurfaceKHR(
+    VkInstance instance,
+    const VkWin32SurfaceCreateInfoKHR* pCreateInfo,
+    const VkAllocationCallbacks* pAllocator,
+    VkSurfaceKHR* pSurface);
+
+VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceWin32PresentationSupportKHR(
+    VkPhysicalDevice physicalDevice,
+    uint32_t queueFamilyIndex);
+#endif
+
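For illustration, a minimal sketch of creating a window surface with the VK_KHR_win32_surface entry points declared above; the helper name is hypothetical and assumes the instance was created with VK_KHR_surface and VK_KHR_win32_surface enabled.

/* Hypothetical helper: checks presentation support, then wraps an HWND in a VkSurfaceKHR. */
static VkSurfaceKHR create_win32_surface(VkInstance instance, VkPhysicalDevice gpu,
                                         uint32_t queue_family, HINSTANCE hinst, HWND hwnd)
{
    VkSurfaceKHR surface = VK_NULL_HANDLE;
    VkWin32SurfaceCreateInfoKHR info = { VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR };

    if (!vkGetPhysicalDeviceWin32PresentationSupportKHR(gpu, queue_family)) {
        return VK_NULL_HANDLE;  /* this queue family cannot present on Win32 */
    }
    info.hinstance = hinst;
    info.hwnd = hwnd;
    if (vkCreateWin32SurfaceKHR(instance, &info, NULL, &surface) != VK_SUCCESS) {
        return VK_NULL_HANDLE;
    }
    return surface;
}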
+#define VK_KHR_external_memory_win32 1
+#define VK_KHR_EXTERNAL_MEMORY_WIN32_SPEC_VERSION 1
+#define VK_KHR_EXTERNAL_MEMORY_WIN32_EXTENSION_NAME "VK_KHR_external_memory_win32"
+
+typedef struct VkImportMemoryWin32HandleInfoKHR {
+    VkStructureType sType;
+    const void* pNext;
+    VkExternalMemoryHandleTypeFlagBits handleType;
+    HANDLE handle;
+    LPCWSTR name;
+} VkImportMemoryWin32HandleInfoKHR;
+
+typedef struct VkExportMemoryWin32HandleInfoKHR {
+    VkStructureType sType;
+    const void* pNext;
+    const SECURITY_ATTRIBUTES* pAttributes;
+    DWORD dwAccess;
+    LPCWSTR name;
+} VkExportMemoryWin32HandleInfoKHR;
+
+typedef struct VkMemoryWin32HandlePropertiesKHR {
+    VkStructureType sType;
+    void* pNext;
+    uint32_t memoryTypeBits;
+} VkMemoryWin32HandlePropertiesKHR;
+
+typedef struct VkMemoryGetWin32HandleInfoKHR {
+    VkStructureType sType;
+    const void* pNext;
+    VkDeviceMemory memory;
+    VkExternalMemoryHandleTypeFlagBits handleType;
+} VkMemoryGetWin32HandleInfoKHR;
+
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryWin32HandleKHR)(VkDevice device, const VkMemoryGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle);
+typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryWin32HandlePropertiesKHR)(VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, VkMemoryWin32HandlePropertiesKHR* pMemoryWin32HandleProperties);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryWin32HandleKHR(
+    VkDevice device,
+    const VkMemoryGetWin32HandleInfoKHR* pGetWin32HandleInfo,
+    HANDLE* pHandle);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryWin32HandlePropertiesKHR(
+    VkDevice device,
+    VkExternalMemoryHandleTypeFlagBits handleType,
+    HANDLE handle,
+    VkMemoryWin32HandlePropertiesKHR* pMemoryWin32HandleProperties);
+#endif
+
+#define VK_KHR_win32_keyed_mutex 1
+#define VK_KHR_WIN32_KEYED_MUTEX_SPEC_VERSION 1
+#define VK_KHR_WIN32_KEYED_MUTEX_EXTENSION_NAME "VK_KHR_win32_keyed_mutex"
+
+typedef struct VkWin32KeyedMutexAcquireReleaseInfoKHR {
+    VkStructureType sType;
+    const void* pNext;
+    uint32_t acquireCount;
+    const VkDeviceMemory* pAcquireSyncs;
+    const uint64_t* pAcquireKeys;
+    const uint32_t* pAcquireTimeouts;
+    uint32_t releaseCount;
+    const VkDeviceMemory* pReleaseSyncs;
+    const uint64_t* pReleaseKeys;
+} VkWin32KeyedMutexAcquireReleaseInfoKHR;
+
+
+
+#define VK_KHR_external_semaphore_win32 1
+#define VK_KHR_EXTERNAL_SEMAPHORE_WIN32_SPEC_VERSION 1
+#define VK_KHR_EXTERNAL_SEMAPHORE_WIN32_EXTENSION_NAME "VK_KHR_external_semaphore_win32"
+
+typedef struct VkImportSemaphoreWin32HandleInfoKHR {
+    VkStructureType sType;
+    const void* pNext;
+    VkSemaphore semaphore;
+    VkSemaphoreImportFlags flags;
+    VkExternalSemaphoreHandleTypeFlagBits handleType;
+    HANDLE handle;
+    LPCWSTR name;
+} VkImportSemaphoreWin32HandleInfoKHR;
+
+typedef struct VkExportSemaphoreWin32HandleInfoKHR {
+    VkStructureType sType;
+    const void* pNext;
+    const SECURITY_ATTRIBUTES* pAttributes;
+    DWORD dwAccess;
+    LPCWSTR name;
+} VkExportSemaphoreWin32HandleInfoKHR;
+
+typedef struct VkD3D12FenceSubmitInfoKHR {
+    VkStructureType sType;
+    const void* pNext;
+    uint32_t waitSemaphoreValuesCount;
+    const uint64_t* pWaitSemaphoreValues;
+    uint32_t signalSemaphoreValuesCount;
+    const uint64_t* pSignalSemaphoreValues;
+} VkD3D12FenceSubmitInfoKHR;
+
+typedef struct VkSemaphoreGetWin32HandleInfoKHR {
+    VkStructureType sType;
+    const void* pNext;
+    VkSemaphore semaphore;
+    VkExternalSemaphoreHandleTypeFlagBits handleType;
+} VkSemaphoreGetWin32HandleInfoKHR;
+
+
+typedef VkResult (VKAPI_PTR *PFN_vkImportSemaphoreWin32HandleKHR)(VkDevice device, const VkImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo);
+typedef VkResult (VKAPI_PTR *PFN_vkGetSemaphoreWin32HandleKHR)(VkDevice device, const VkSemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkImportSemaphoreWin32HandleKHR(
+    VkDevice device,
+    const VkImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetSemaphoreWin32HandleKHR(
+    VkDevice device,
+    const VkSemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo,
+    HANDLE* pHandle);
+#endif
+
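A sketch of importing a shared semaphore with VK_KHR_external_semaphore_win32 as declared above; the helper is hypothetical and assumes the extension was enabled at device creation. In practice the extension entry point is usually fetched with vkGetDeviceProcAddr rather than called through the static prototype.

/* Hypothetical helper: imports an opaque Win32 handle into an existing semaphore. */
static VkResult import_semaphore_win32(VkDevice device, VkSemaphore semaphore, HANDLE handle)
{
    VkImportSemaphoreWin32HandleInfoKHR info = { VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR };

    info.semaphore = semaphore;
    info.handleType = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT;
    info.handle = handle;   /* name stays NULL when importing by handle */
    return vkImportSemaphoreWin32HandleKHR(device, &info);
}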
+#define VK_KHR_external_fence_win32 1
+#define VK_KHR_EXTERNAL_FENCE_WIN32_SPEC_VERSION 1
+#define VK_KHR_EXTERNAL_FENCE_WIN32_EXTENSION_NAME "VK_KHR_external_fence_win32"
+
+typedef struct VkImportFenceWin32HandleInfoKHR {
+    VkStructureType sType;
+    const void* pNext;
+    VkFence fence;
+    VkFenceImportFlags flags;
+    VkExternalFenceHandleTypeFlagBits handleType;
+    HANDLE handle;
+    LPCWSTR name;
+} VkImportFenceWin32HandleInfoKHR;
+
+typedef struct VkExportFenceWin32HandleInfoKHR {
+    VkStructureType sType;
+    const void* pNext;
+    const SECURITY_ATTRIBUTES* pAttributes;
+    DWORD dwAccess;
+    LPCWSTR name;
+} VkExportFenceWin32HandleInfoKHR;
+
+typedef struct VkFenceGetWin32HandleInfoKHR {
+    VkStructureType sType;
+    const void* pNext;
+    VkFence fence;
+    VkExternalFenceHandleTypeFlagBits handleType;
+} VkFenceGetWin32HandleInfoKHR;
+
+
+typedef VkResult (VKAPI_PTR *PFN_vkImportFenceWin32HandleKHR)(VkDevice device, const VkImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo);
+typedef VkResult (VKAPI_PTR *PFN_vkGetFenceWin32HandleKHR)(VkDevice device, const VkFenceGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkImportFenceWin32HandleKHR(
+    VkDevice device,
+    const VkImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetFenceWin32HandleKHR(
+    VkDevice device,
+    const VkFenceGetWin32HandleInfoKHR* pGetWin32HandleInfo,
+    HANDLE* pHandle);
+#endif
+
+#define VK_NV_external_memory_win32 1
+#define VK_NV_EXTERNAL_MEMORY_WIN32_SPEC_VERSION 1
+#define VK_NV_EXTERNAL_MEMORY_WIN32_EXTENSION_NAME "VK_NV_external_memory_win32"
+
+typedef struct VkImportMemoryWin32HandleInfoNV {
+    VkStructureType sType;
+    const void* pNext;
+    VkExternalMemoryHandleTypeFlagsNV handleType;
+    HANDLE handle;
+} VkImportMemoryWin32HandleInfoNV;
+
+typedef struct VkExportMemoryWin32HandleInfoNV {
+    VkStructureType sType;
+    const void* pNext;
+    const SECURITY_ATTRIBUTES* pAttributes;
+    DWORD dwAccess;
+} VkExportMemoryWin32HandleInfoNV;
+
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryWin32HandleNV)(VkDevice device, VkDeviceMemory memory, VkExternalMemoryHandleTypeFlagsNV handleType, HANDLE* pHandle);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryWin32HandleNV(
+    VkDevice device,
+    VkDeviceMemory memory,
+    VkExternalMemoryHandleTypeFlagsNV handleType,
+    HANDLE* pHandle);
+#endif
+
+#define VK_NV_win32_keyed_mutex 1
+#define VK_NV_WIN32_KEYED_MUTEX_SPEC_VERSION 1
+#define VK_NV_WIN32_KEYED_MUTEX_EXTENSION_NAME "VK_NV_win32_keyed_mutex"
+
+typedef struct VkWin32KeyedMutexAcquireReleaseInfoNV {
+    VkStructureType sType;
+    const void* pNext;
+    uint32_t acquireCount;
+    const VkDeviceMemory* pAcquireSyncs;
+    const uint64_t* pAcquireKeys;
+    const uint32_t* pAcquireTimeoutMilliseconds;
+    uint32_t releaseCount;
+    const VkDeviceMemory* pReleaseSyncs;
+    const uint64_t* pReleaseKeys;
+} VkWin32KeyedMutexAcquireReleaseInfoNV;
+
+
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
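To round out the Win32 external-resource declarations above, a sketch of exporting device memory as an opaque Win32 handle. The helper name is hypothetical; it assumes VK_KHR_external_memory_win32 is enabled on the device and that the memory was allocated with a VkExportMemoryAllocateInfo chained into VkMemoryAllocateInfo requesting the same handle type.

/* Hypothetical helper: exports device memory as an opaque Win32 handle. */
static HANDLE export_memory_win32(VkDevice device, VkDeviceMemory memory)
{
    HANDLE handle = NULL;
    VkMemoryGetWin32HandleInfoKHR info = { VK_STRUCTURE_TYPE_MEMORY_GET_WIN32_HANDLE_INFO_KHR };

    info.memory = memory;
    info.handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT;
    if (vkGetMemoryWin32HandleKHR(device, &info, &handle) != VK_SUCCESS) {
        return NULL;
    }
    return handle;   /* the caller owns the handle and must CloseHandle() it */
}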
diff --git a/src/video/khronos/vulkan/vulkan_xcb.h b/src/video/khronos/vulkan/vulkan_xcb.h
new file mode 100644
index 000000000..ba0360060
--- /dev/null
+++ b/src/video/khronos/vulkan/vulkan_xcb.h
@@ -0,0 +1,66 @@
+#ifndef VULKAN_XCB_H_
+#define VULKAN_XCB_H_ 1
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/*
+** Copyright (c) 2015-2018 The Khronos Group Inc.
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+** http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+/*
+** This header is generated from the Khronos Vulkan XML API Registry.
+**
+*/
+
+
+#define VK_KHR_xcb_surface 1
+#define VK_KHR_XCB_SURFACE_SPEC_VERSION 6
+#define VK_KHR_XCB_SURFACE_EXTENSION_NAME "VK_KHR_xcb_surface"
+
+typedef VkFlags VkXcbSurfaceCreateFlagsKHR;
+
+typedef struct VkXcbSurfaceCreateInfoKHR {
+    VkStructureType sType;
+    const void* pNext;
+    VkXcbSurfaceCreateFlagsKHR flags;
+    xcb_connection_t* connection;
+    xcb_window_t window;
+} VkXcbSurfaceCreateInfoKHR;
+
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreateXcbSurfaceKHR)(VkInstance instance, const VkXcbSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface);
+typedef VkBool32 (VKAPI_PTR *PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, xcb_connection_t* connection, xcb_visualid_t visual_id);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateXcbSurfaceKHR(
+    VkInstance instance,
+    const VkXcbSurfaceCreateInfoKHR* pCreateInfo,
+    const VkAllocationCallbacks* pAllocator,
+    VkSurfaceKHR* pSurface);
+
+VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceXcbPresentationSupportKHR(
+    VkPhysicalDevice physicalDevice,
+    uint32_t queueFamilyIndex,
+    xcb_connection_t* connection,
+    xcb_visualid_t visual_id);
+#endif
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
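A minimal sketch of the XCB path declared above; the helper name is hypothetical and assumes VK_USE_PLATFORM_XCB_KHR is defined and the VK_KHR_xcb_surface instance extension was enabled.

/* Hypothetical helper: checks presentation support, then wraps an xcb_window_t in a surface. */
static VkSurfaceKHR create_xcb_surface(VkInstance instance, VkPhysicalDevice gpu,
                                       uint32_t queue_family, xcb_connection_t *connection,
                                       xcb_window_t window, xcb_visualid_t visual_id)
{
    VkSurfaceKHR surface = VK_NULL_HANDLE;
    VkXcbSurfaceCreateInfoKHR info = { VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR };

    if (!vkGetPhysicalDeviceXcbPresentationSupportKHR(gpu, queue_family, connection, visual_id)) {
        return VK_NULL_HANDLE;  /* this queue family cannot present to windows of this visual */
    }
    info.connection = connection;
    info.window = window;
    if (vkCreateXcbSurfaceKHR(instance, &info, NULL, &surface) != VK_SUCCESS) {
        return VK_NULL_HANDLE;
    }
    return surface;
}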
diff --git a/src/video/khronos/vulkan/vulkan_xlib.h b/src/video/khronos/vulkan/vulkan_xlib.h
new file mode 100644
index 000000000..e1d967e01
--- /dev/null
+++ b/src/video/khronos/vulkan/vulkan_xlib.h
@@ -0,0 +1,66 @@
+#ifndef VULKAN_XLIB_H_
+#define VULKAN_XLIB_H_ 1
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/*
+** Copyright (c) 2015-2018 The Khronos Group Inc.
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+** http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+/*
+** This header is generated from the Khronos Vulkan XML API Registry.
+**
+*/
+
+
+#define VK_KHR_xlib_surface 1
+#define VK_KHR_XLIB_SURFACE_SPEC_VERSION 6
+#define VK_KHR_XLIB_SURFACE_EXTENSION_NAME "VK_KHR_xlib_surface"
+
+typedef VkFlags VkXlibSurfaceCreateFlagsKHR;
+
+typedef struct VkXlibSurfaceCreateInfoKHR {
+    VkStructureType sType;
+    const void* pNext;
+    VkXlibSurfaceCreateFlagsKHR flags;
+    Display* dpy;
+    Window window;
+} VkXlibSurfaceCreateInfoKHR;
+
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreateXlibSurfaceKHR)(VkInstance instance, const VkXlibSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface);
+typedef VkBool32 (VKAPI_PTR *PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, Display* dpy, VisualID visualID);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateXlibSurfaceKHR(
+    VkInstance instance,
+    const VkXlibSurfaceCreateInfoKHR* pCreateInfo,
+    const VkAllocationCallbacks* pAllocator,
+    VkSurfaceKHR* pSurface);
+
+VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceXlibPresentationSupportKHR(
+    VkPhysicalDevice physicalDevice,
+    uint32_t queueFamilyIndex,
+    Display* dpy,
+    VisualID visualID);
+#endif
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
diff --git a/src/video/khronos/vulkan/vulkan_xlib_xrandr.h b/src/video/khronos/vulkan/vulkan_xlib_xrandr.h
new file mode 100644
index 000000000..117d01799
--- /dev/null
+++ b/src/video/khronos/vulkan/vulkan_xlib_xrandr.h
@@ -0,0 +1,54 @@
+#ifndef VULKAN_XLIB_XRANDR_H_
+#define VULKAN_XLIB_XRANDR_H_ 1
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/*
+** Copyright (c) 2015-2018 The Khronos Group Inc.
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+** http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+/*
+** This header is generated from the Khronos Vulkan XML API Registry.
+**
+*/
+
+
+#define VK_EXT_acquire_xlib_display 1
+#define VK_EXT_ACQUIRE_XLIB_DISPLAY_SPEC_VERSION 1
+#define VK_EXT_ACQUIRE_XLIB_DISPLAY_EXTENSION_NAME "VK_EXT_acquire_xlib_display"
+
+typedef VkResult (VKAPI_PTR *PFN_vkAcquireXlibDisplayEXT)(VkPhysicalDevice physicalDevice, Display* dpy, VkDisplayKHR display);
+typedef VkResult (VKAPI_PTR *PFN_vkGetRandROutputDisplayEXT)(VkPhysicalDevice physicalDevice, Display* dpy, RROutput rrOutput, VkDisplayKHR* pDisplay);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkAcquireXlibDisplayEXT(
+    VkPhysicalDevice physicalDevice,
+    Display* dpy,
+    VkDisplayKHR display);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetRandROutputDisplayEXT(
+    VkPhysicalDevice physicalDevice,
+    Display* dpy,
+    RROutput rrOutput,
+    VkDisplayKHR* pDisplay);
+#endif
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
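Finally, a sketch of the VK_EXT_acquire_xlib_display flow declared in the last header: look up the VkDisplayKHR behind a RandR output, then take exclusive control of it. The helper name is hypothetical; it assumes the instance has VK_EXT_direct_mode_display and VK_EXT_acquire_xlib_display enabled, and in practice these entry points are loaded with vkGetInstanceProcAddr.

/* Hypothetical helper: acquires the display driven by the given RandR output. */
static VkResult acquire_randr_display(VkPhysicalDevice gpu, Display *dpy,
                                      RROutput output, VkDisplayKHR *out_display)
{
    VkResult res = vkGetRandROutputDisplayEXT(gpu, dpy, output, out_display);
    if (res != VK_SUCCESS) {
        return res;
    }
    /* After this succeeds, the display can be used with VK_KHR_display to create
       a VkSurfaceKHR for direct-to-display presentation. */
    return vkAcquireXlibDisplayEXT(gpu, dpy, *out_display);
}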